[Unrecoverable binary data: POSIX tar archive of Zuul CI job output. Members: var/home/core/zuul-output/ (directory, mode 0755, owner core:core), var/home/core/zuul-output/logs/ (directory, mode 0755, owner core:core), and var/home/core/zuul-output/logs/kubelet.log.gz (gzip-compressed kubelet log, mode 0644, owner core:core). The gzip-compressed log contents cannot be rendered as text and are omitted here.]
ǘTLP23$sGWBvN8|7dm9.= )yRjӖ,O?-ݒ0yHξΗ;*⪣Hchb"N$ip'$ ׀s#P|dAjP+,-NjqVyOA6TBN0YPʆDy rh߷6@7QpsLɜY`O&:/vLl\ѱL&rOV8pZw)ThV/#Bs4K]٪A/dZ:Wcr{{ؔ>ҳKF%, = ޡ`rBB)Q$iy $`7[0֝RCjrv$yq{%&7c{q!$4+M>9zR-:b66tJ/r Fq*I;;)k$D֟7#czE9FO< 8Üb$X.]iBɛCD QX% Ό*wN &}D,lYd$BQ3"R\QG<(E_U^=tBJnk@=iVr wHXҁ'{8l7}i,|xǽ-icCGL) jQg޹1Kurq䅖aкgwKT;y4DEr%+oAroR4gΞ.CV[ 1}zGz&U裄 )52EYFtPEVZ4"^(>Fڊ$ބyN4:e"p%*pzv[<|z$(\?BWpG'ik;>o]$8HpuRJ+d#*j-&f,22)YȺ{aMJ.%2G {0z ༷ i"<@\ gePd_ Gͧ;d1r5s{ٗ-ofoU,xyk=0%+˖!c1Kof Ç^-c,&wjfevݕ+@p8OӠ?!ΦwK[]]sϰf2gv>ݫ6#\_f6gwYi$.}Nv?}@Âw9QL΢]d`)g֞t'ȹPn{ 'PX[?u32g~x7 ލv*?{j;dRWIKKUt/j+cԗ3׊ޘ{Gv/~[ِm2KZ-~J>E_45Y봝um8~֒-A-b-b-b|Fc64hhhh"C%%6Zl6Zl6Zl6Zl6Zl6Zl6Zl6Zk =5RBYjwE _fR.seLhx4N>3WJsBHA0r}c ܊m^D *D3BSiv`\gNP$t`ʙjySH`'`E>\_}<#s'Qx{ҷOj#Y5a9fD6<٧Uf܀WăJ+ExPd[7r(d #"(XL"bǐL AOb`. /P0Y4)3k'Y /0E=Nygߺb fk{Hn#3Y&!w n.'0.ź$Ogl~]XLg_d45GiQB 7w,4YkxS5_/ >GrIJ5nsv>?zn{9K7tFvȍ̰2FCFQF =Z}vYTDbY f^])oO_W{'6(ϦG痋O}~#gRZW,kopv gd;>ssGve惖kYlz! ;> 2=hZ78GJx|D~/*\ޮȎ%co}o ebis#\H=x>j+H\oH>M.g TYcvՕ7|_zO71!Zvv2~KgX&r˗g펠Mi?xPo3`$0꜍`1gF:\k\ %%'Fҷ ÿ.'f4[dOL8d2&.-:MѩƜYj3Щb"!wtDW- [M6da,v>^-}Ul=m0|/P*n'mۇ_i0'YؓA?/f_Ζ&~uaj|Crg'd1ϴsXa~r; 'Q+yK &+8($09*:'YU!C |lLVg\ͭI}`t*R"Wզ3˧ڋ<$1},UK˅QVhA]S b^}- u sL2]aJ2[O de wR)!^L[I(4鈲ްN ,p9M8:U N-bţͣҟ4+fLQ0p: /'p =_CvL3^|ʰ;#f+9[)b[NzCY*Nq;JJ3RikY4!ds^muԂi;& Eci[MsOS% nzv4x$G"i-.WW;#BSކhEU=s,;a2AJS0g-degaƔ j .*Y Գ^qJjv$lC#e #I+PX * zkFHEBgBS*pPӯ,&38FSd\s#P|dAjP+,-NjqVyOA6TBN0YPTEy rhi d JÛb8N9[M&dBe,Cx:/vLl\ѱL&rBV8pZҺEBӶzۧYV zSQj"rAeT>ҳKF%, = ޡ`rBB$iy $`7[0xQKB^Y;4/5qG[^w?R@6ƍ{%n)XuБ[Lggt K1ll:_T2w!vR2Ց7pRIT?o;<(̳\' 3`Kk 8k)Lҕ_$9DU0y8)kUh;'>e,|2ӿ!F)ͨULL Mg6{2րB{jlpCAnAd_z,|նIb%p96tJUQtZb-@">tDVJd]k$"+% >Y.j iuO!SW&jzI֩Oh[AI>\ԗx[nYDzB[En G˫lZY'tVwܕR.4 G}8QAªPVCYQ~k dz7vmCZφGv׳?~VJgZ{8_!)H 쮓 ~~ʺHElkC,Z45HkĖfz0n7$ 5j)Oԓ~gywMhXa@\GXe03͸[ BYWReQ1ʙo'=xkaGö p(O!7nj@LQXr"d 0x- B FS{,`To';conj52ˇ<] `7ܛ;m<ܿ9=3`ztVѪg]*#z۲~U/k5SYa/]64HbTcMUjԦkZAݏ->\R!_,zXekYyiFGըaבp6VK'Wt,or?Ө7Vf^{F^k>lcc(gި_%n6XMQN DM$P B 'HQPHY&"2DLԓf GYagjd)Lrl֡>Aj Y)u"棂u^)%,|͝jFg:$o]t̓uӟ'Ik^?Oӭ{?l褙[Lt 10J zadd`"ľحK-Ja.o]Rd" 6JR|UF%J z)HP:d(*Cp[)9[*INSvL;gM諐Mvs*mjYv'!>^*.x_@QLtS>`*XI)@v>D]P <AJP\̺⟵?!ʘ1XPZ4A`/ Q#D)%/+ u% yT)*.$vqY~B7jw&W8+Ohp84<\|爭=eFMdrD ̞D* b,f$LV1)-b+R6Tg'*PPaS0Yp1:%"3qG0%skwۂڪG~5s;4XvQ%+~r%"{gNu!K)-2ZPZNDc -=Z6EG-cM"5cBv*Tgnљ8a;U`DL?6EDGZ>Ɏ:RB 6 kSTΡR0$&%'-t$0~| 銱R%P*΄"a6)#{ҀVy+1B]gt&mbg~ԁqq̌kbiɦh:Eb_m0cP*]]ZxkbQhXvLBe<#@Q::.B;ӎmC>-}iCFUM6}? ~|G8%ufvƒw_62MښmƋ}D:IEI@}!1d.Tfw;ƧX'-#QwT F@A%RDa0bSt2&)JʙHhvAh4$>']wJ56gegI-⥗D"xJ^(IR8kUmQ,kPؗu׋K [X JNLD.uBX7lv>R)kCY8' n$A0"05$ "@J ]֡(ւRi-A)r [:,=Y fa ʴ9XB'Qw&̒ J*ZAڷzOQgӏ"~=z:KS硘V!ءqc,3ƪ3nJ vǍU)ǍqcȂ! \*-m+jp!0!bA˝*1W,RpUԪP^Dr~(r W(VW(-+6zz2b<chy28]kXQןO>||:@mx6x|p2#OYgj kd] .o*m73kfroŠmHq0N&!@C??xjp>YfэϘ01a9Z}\UOݭ(U`mwfE++J䶯(_Q^"Vˋt[?Jay}Q-V(~3߰J'x4\ ->1c =bbANoDvf~_zB-&+QJ34 %E@zT?< ,ruX{6Ybi0 \S@צĺ iC v'Pŝ!UZJBF!:bi0WUJ{zpq͡v;"vU3>joZCկ9㨹wkDۦ\;/SN 攎,6IxpB[:@m-ȥq[~ dǙ$>pϿ u`oT)z@PK0dB cNbN@E:+hRT)AhFYA=|~glqȻ>Yqx8\ᄑc N޿_x~wz)kVzu:RR+ɪ,-TFo -Y(*( ^+=DYq3|7f䃃׹oniͲ} 5֧7~J9،KH1̊Gϳi[Wˏo6yb-\4:vWZ wWiAl;R*ٓH:`Ak#6gAMKFM *g56뾦 [߼r|fIUƋrԚLRohuV~&9.dGYx'#oTU7ͣ -sJ*E'Aڔ{WǑBCvUT7# $gbC,*һKrp=3ԲI! 
מꧪ ;+wJ" sooͿ|8~y2>q|>Yqu=[{{;.o_gw0ױcM9R&~ţQ|8{Q(a#icMFSjGit~T:VHQ!͘*?R{Nn*k*1p;T[?t,;92 d5t@d542pA yTxZZEA|**/ W J4 cXC"_/|NΞжܸI瓸Sn8Td 2!ޘAELT|GJ$T?;jھMF4)"DZ"H,(uJGgrfȞ!č (Sap < }@R_\Y'WH .)LdDx!}]β>7|A 11j6ڒO-T BfF 9GJ0\A+R%z7*R[Eڧi oZ#9 JAC.s 7R0Qh~Z?߮EtIop2I~:YƋ}y/u__C!z}[HjdoQWnYX qdbbQno5"{Ŷ0gX) ypuWME_VK|9$ כOFCM[**#EN)^NUpJWk?ǵGC!KiH5SP*) `s`"///y3سPnv+zLhsЛ>7]1e ,o/ya2̭9Ml\c昵yob7&qJ⌿M,d;"BZHx:uoڽ[hravg^vA $-;F%{%sGq>wB }gTf+(qFk`2Z#441%E:&ʢ$jRZ} nhSX[wl6Rl{tw13.1p]D1|s%\V*)uV%'rS)e`1шD,hGm+%2_;.g%q4|{({ߎ7Z+||sJ#6+Ƿ'fSoRϳ.S1Rv6P =u^>:y[;rJ:IL1xF`o `"EV&d* 2TQk|ߔ}aPBSA0{+3$L3sc<0f| !5DwA|v|ÒѶ;NywV=Ȟ_YEs:A!+;6!h $4m3yY*o[V&C(6-FzVo͆soY{geٕN pFF&v;|޳ï=,5馜'W;7bAsYN%%;/7 =1fdAiƨ~WRir9;4 ORk $,X, #NƖ۲DQ\ D$&:#b\PG%luaј9!s?\\̦Ár,pI0crb`dB%LqL(jTQxd52qc r8@P'BJW#pA PN9;(; %QUԏ.ۂPoY- Oe^ l ›DIX0'_t~9ʽP9PvCŊA&] h,R Yz`t.01)jxP0<ǚ1"е:-hkS=4_($lLTֲ͜Կf5i#ꐴTe YI1L&A7&|2 EQ6[R(%TѨ1P.PSDHiTDj (Q:πh0+ *sQJ`#pʰ9A2$dil8#K ,CWpW鯞{5=f\n?b=4 >FBS:z\)>|Pv-tE.o|S0t:{ړqLaZSP-K{r&lqHRbWP4@v el.e,mSy0>g Zas dHч@ XDJ Ȓ- 4Qg<0y>$Jŏg0B`_Gָ9 %4~}r'-ޱvVug\]\J-֢o|V\ ɱceČ.X!*EM)yЮJGѶX*)՞'dӓVSUOLv)SNX/Tkl8HkUf-m!ImfDٖpv//>,Not:{?=],pH(D 9d g3Ylj>N,Y9͖1IbK+P=PPb5ҰA%Bo',[jsl륎Vùbybn&bhG{gH ʔB ~rb 3H.dy kQCCv`5(k3Q^t֤,\0p̵$";ՙ|*fùNTCc_[D5Z"ޙ Î:ڤ)+bB IվMQ:4MaaPrж`C}e\12RLH!ZMf"=iT$=bfy3̟:]\t+:Xg3).vQvqw2N)0ȱ?RUMWH O& }NZ4-@::/.hOa6Cf?{&^ Q~g?G ,?g>bٹ? n~|Gϩ7ُԗːUS ɫq6O wu8ׯ'@SO~eWQ[w>?c2 ׹g|?~'qF)*c7/Y/Cy#wVzÁ~}f߫!7$@7 )9 &mա7w;L̼y-߬M>Qgy9?_ o0Wm/}졩={Nu3-MX'fPkK j˫ 'kLL QTsD-:k\#Ƀ#rDF>UyWJ:D̑}.g4*LN<$xn)9&Qcمl!fP$b뵛fy@^&CbzMx~9-yg5G<ĝ>4S厌:)~KRHi\^Rv^ 8D"4X#gdqQsinǨf^J~q<ٱ6XPR(J1j֫|6XҸО]PP''KB A`Uթ \$?mL֘%貱5׫?IDٛw ǘe!+~Ѕ` 6\l֋SU Re`_X2@@ɰdH@dtWM~2E4KA~'=#ic^5eV :!t.bR$Q"ٻ6 x~?»I-ÝabDs O!C DZ]6YS]z,&eY9WB]9X̵Q*g ɭ`)I!p>r9hIM/sa$2iϽ`jCR`$Ղ6^1`4wDnBD!fHmp6HFo,/þ6*5X]eή;OvN* ve 5'`Nڷ,!PeAd.xu7tk,y$ve䧔cou᭣dgKl4WD^rÜ9Q6CQV8dMDL"&y1Hj.8&8<qNHd4P@, p!SCۭG)0$j֎3x-6 b̧h_ ڬ{uL0E`Y1 s:aO:k1pY f1grHx'Vn YꭡJu@tX6nf\*Z.5 kEaFnO~^'`Iأyå  &՞k:FY^1Tk%iXQtw SlVp^rU+(/Qކ,۫0.)[ >hYv0%w4M}57F.C4\l> ik[h8lxh?K)TNto ^AIwnVf\>1N:QVs`~lXhCu^Ns<{Ӂ2[G$-sh۞}DyiuvZniQlx}soU8?A؛UzR4_4mນgd%%~ջ_%es毴7gHŷj+#춃:rn\Nr3udH#.ۖSGSGϸYpZFaz+Ct2!L44$#!LI$3 ERQ)Մ1ѴzK*Ljd<)C꒚2""XH0<);>茝'Sk\>J"-lNѢv}r=b8|4~76=o VK# [c J" )tHyDDb9a8AHpDH||iz`x;?J,9 B2 (#I]9lP;dTS;,YydcOOk/%%<9Q ugS΀0l u;-=lFEQ^o3Ub5$O7s+l[CE-^mZZ06S?֦zm6pm)c%\D%wR˅@a|w'ĈΔcx5v . iL! T^G@/#!V:e'fF׍R |P\ DZ~ E4n^LC Z}B9fRȲ@BgWfG s): /J:+o3VL5G= RNm9"E݋~- u.ZPWToN9}3&M9YR8 I$M1p\=$)2tY?tbٙM{|Ymޡ6c?Hz?E)/Iڎd6e+5j?]%ٟC貶F f]=ݻe^$&N^ʭR!YCțDR@ Vizemb=MMHvcP#n7;Z q;֓N[f=lqȤ*pn n `<Anp}D' Ljni,jf"lwI AUbRёkʹ t{RvVp?|-*~ؾ锜8N:KQRc)Ob 1aWّ9}#χwYT`%ۀ9JIR5xǧs@E@$.*ڙlYv\uCڅGU'F)xOvJ.8W%gr򿓫]C^kAy9抺k ć>Ł\#sA.=(2R-FTP&rΔcX9CFh 4`$LiߩucFY; ZDl ka[0 ^ر;mbJ\>=,L?X}Oj| ˹>'[F+n7`FLo 8Ǘj}% J%`9bZTwݵP9 gYYrA}0bz<<8*`u_.oy*ǟŭ@.e=KLW{0uQr&J' RZgҞevvYهgj#>(vI1`JgVhܶ:Y rX-٪j j[2}\o/ƲUxbO|g~3*d1nnt{}nu-yҘ7%i pyʿc練5D*1,UbWSĭFWD7WJpOɺJ su2pؕT*qp) •m* 0>JRr*pGo]%.ꇁ+r9^. eڍ]n :pR\ pc&Xӵublբ7\eajVz8-_~ˇwٴWM=&13B/1 4|#V޿ߒIFOa4su*0 Cs|.9=LEV̇Q9̌-//RsZ#3mV#ť͇nWwW]3 9 Ƙc|:DGZbeLq6ͻlv=,k7ŐwصRݴ%J}<@rFϵ,wJÃ&qHUgr8]dtmbWSѵ[] ^׾A]KFJ\K< *qzzpŘlu00*e2&f:Kb KR8U41h/#)B{8>/LI asbĮ8ĭǎ͉KM{l~5f\.W[u86^fT\L\A]cĥ-•ԊewpVW.GT*qK~3df-•NOɺ99{l;\~"-#ęx}2\Įzmj'n5z0]ήpEzw1L+`}2peT*q+p*բeGw9VЭK.3 iBecVBGG` #WQL~,<ߟɰV[c&dd!L{)c.}~RC{שgIѵK׊,^ΰ5,GyD▱_Q֏ 8IepIQN)93Z&zGϚYIdfXY10 f1grH8+z0iJ/:*7Q*QbT+[-Feqv?P_| 'Yk}Rz~$9g_MY^g^m _c'%8gQrMydՁܜI1>{wؤˎwTC)## Q*"p'Z2#l1:rj5G:Rb&yRF)þ2Rʃp-kYy;c]Q:W*of%N.#_m|./nseӣI/BGEsJgIMMHeq&L@Ֆ(Xb^a˜R֜bZSSO9P5Yr7AO?Pb*@XR"%~dNF03,*͸Jb,aE ! 
t`nX Tʥơ~źXۈC$i(J0".H%<$YQV0z//ZV:L΃S[P-2RV(&DQî_H.}aJHBY7ȝJ %Z3L[A |r1Zkk' Suup*W~~+cpY*e0l822[$|@6d^T7sO Eu~R aW!p.3c]`nYb)F5cHιK M] Օ3ʮ/Sr9+RSE_RrV))ڛW:D8UHJ/0ϊ|40 b]w%?{WVd؇E``wfv1gi#t+-;ȴ-o8$KUsU!߽8<fw{+W~,>uok\x$?'avHG}~?Lꞕ}2|!y%Tq޿ZяOwBZ>b,Z Z0vzrg_̝+3?\v/[UI쨂b $J Eyqc~_F]Z(g!v2prצq0.;yכZb6oû }xirio|slݤ>s4E] GkvfnBgT|ꏗ_'n<; lvTW{*ѱtOk ~ Po>swWUsvuֵ]+ՃY!hf(fn9(R>[YDDpB丑Y~Jڭ#EʵeEcίfy\Rb9j&-} »- %~97>ƾ. ٭aoޝ~ ';Mpν,Q{Oݻdz7)W_n?}Eo}pmkڑn^l'bBĐjUHΤ}v|˧ljHKXY=DaɣM#^I%;@TιLS*AYW"QMжE _p!kVpTM9+Ueȍh U `䤱sp%& _=(-=S>ЮRB|XӼyX߾to{/پ[2ycjbU]%evkV`1V*hU%j?FESy85z-"__ͨL8^oo"|A'Aw|;7]Ӑ>q{PNOo pd`jv 2|> CqMP\4xoY[Ԥ \ge7s<-d>@(oR Q@k IVqT݆݃C=狺I~ګ6&6 bdYde,+xn\:(@emLԃ52q˧ᖄObz$|=uKNڦ'bƼqWnIЩqɋNFnZq!gqS_jza+M%R?~xlIg/I&~w!v{!VvkQxNz[3gr!h, 8vp˛ KtB$g&S;+&h,ǮY%HmMEYaLط^oRgv*)mAF$\HRUs)@5لz!v z=/g'VUoĎ`m셍P6huJhÜ]9$yRdCpIOFնLr8B`&4٠H9ׄ29䵼!JŨ (+KY0c! O#k9%+FYCKdp!zӡ*sro:joPM4\E([Y X^؜L0L>v@>vYRKM%|M+Av*mZ ҁ/tlJ1$wt~zz~2(xr~Kj2^oEkT2:M-J1~g'?ZZȯbǻ(<{1-coIN"EeJ{'([+y9o`.Y!>\?MJK瘽[fsT>M"JXt ħ;Z73|2y;dDk 偂) N4Uyhj>Jq>sT+dfiD0bp^ϭ]+D3g\Pڷ &bMEV|)9SQ*g*!D"$pv'<5[czczct8EY#Z7]#jlDU31aa@FfRc VAUkl=FŲev$~!6Ip_?S|57͝*cLB \(3 >:5͕سʘkǧ2?ʘ~2U(MLXC16oԊZciPK# ƒrѺX8z%$(HvTBC 6NCR I֗Z6[)c HTdA:;Ra-U-S?^ej.>ަ9 ʲo.xh=;oC]Y*.M.>V:P_]U H Ih5` diSrI*(.)2bb6'=cU#3κ .')5jbJjB쬬&y(*ރS8equq0^6unۆ^NZEsU@\m[&M/#"c\7>3JLd_|^]#fc 5msRuݭr{ B8'`N3w};{$ռ-)6q-F0C$LbXC컋Mn&妱'ÑR+c>,jE HAV0DKrHǩΕV^}QzG__668bgpȢ*!9(9d&-aXVUiM43@R3{`:G( lgVٛ.{5~@:LmGA;68x3h}֎uIT _/Rx#J :bTN==::X4a&\4;3;@L̺U!C8*?M* c >$Dl[TYԢ Y-; F*ubIYGq? RaMm/|e^,zc~|u6//5 ZN煠q#Ib]hpHr ; n0W[g[r$ٞI~$%QQ%ʦl[&sXm,8WHP T=Zp >D 5p:qBj,gyHQ_JD| om>:G۳UXFUyJG\1s)λ(^3tny$<% Vn(=`1\wASQZ9,"qβ6._%$we_eJLryJ]3jg>LLj9+KI}p^٪^¶!K$IlҦrEI>(/xr?nJ|4ZS}呢탯 HQ%ZE˝hϾƓIRSLݤY+-t-8\*xa8Buԥ6SMPzDoLƉ:g_闽&4.8~~?n;,øO s3(pzlpf;K'Ʒ軳(~Meš |L=L}lVM׭)6Wpm!W#୮{n.G`Q7Q/Mbtu%HCנp,!AIBU$!2E)o]GrNX NP.8"Z_0%,9 p{uBp0ܬwAz焲A RM=d =!"=AZ}t>D7nOCA;| ;-𹌒mho.kZJVUNi")ծ$eIݒm@[#ъvj"NȎnzY)V#R[D׵~_Uq-xm !E>`u~ ؒlVg̎$) Cd1)031!𭋵CmP $KH٦*ʤld#4MtيnύH߇+uU X(ޟ/ =%PtR,ז+nBB޴$ue6Pv(i'Y;U?\,q n%T8G ;'uۭ>lqx_oCfI+a^y>z뎯M f:.ӪY./n7 Pk-B,A>)@eR%?H+e'n=5_mb |^\-gantJNHZ()1j'1Rp0aD,tֳO>;| z ӝ$/9Rdo#s+$ac1Z< d0AX!K+|7{_g^ƚ?yabz}SdAvۡ^{:I FC2J$U@*E|崘gr\꯵%gf\ϭX=9?PZvu]) ԕA_}!}U 9lg۩ 9 Ƙc|zs(%<@@E>ג)E 0euT9,R"1xkv"Pt/:!fH ^ wE%K h`:j3zWOk nJeT*|Ǔ|^η3$(PB!d>óǃwLU~пd+f:ITEqA 3 IW̮şɚ'j|ßǟRO)S=qEMG $DҠ6TXKAEc CiJdxfGX,+b/3/YV{qð EƲ(F~w_ *@bXOĮRA@С c\룁.WZyU⒋ \I"?L.P\%v ?JܲgJ\r>+E4+B \Üࣁ.%W[WKzp)Ge +J*},p)AH5WWJҼI Sp4v OVv$.1HWt]S8"J +|4pU[h*qWo5Wp-ިMSPj)E,T)o8-C? 
$?wdɼla4`rx#3O)ڋվ{+E9p1뇯ދ_SXٗKhmf"]XjUY%Tz: J?"wkBtİ<#ѱĬ:F:[1(##İBGWBGW[WKzp@9GW]ɎZ˺E0+6Sb4dr~?U"n;wJyJyo+T54{37$bmǚ{,0WZIaJ[&Z8D#%2(Bfa_IaU0gf[ʦLS9%hdR҄ȴTE~}:uh L%X{nۯd!rFBB,[XH- ,HfQOS]:+ko}1'=$J$B!r 'C)D ]T)hMj1d$.`4%Q1c͙.f/%84,_5~d3r0tQEb ֥$h8 ;@ hAGVP o?5.~!te`ˣfִص\3!̶ 3f`hTԾ6J"RO>&\OƹfI>mAF30se16 i`~ҧxw=v͡YFh޵󹹝_mZU^;whG@-v2JD@D֚`SA(Cac->ozm$1h-N&PfTeeQ9b0XJBԀPEL;KF=Orcmd2 e^WώVvG5eŮ,˗,9FwoYz.m,s-c[5bkzT׈ѾmmKFv-o.G} {tf˃6K|F/~}>r߸ေƥuu;1W sҘ8~ƫϲݝ\ٹve]_vB g^^^ػdvR2s%ةĨxfՁlҊ=?!܉ÞʎNBz5ɰ$$)Czн1*ұJ"laёS˨`.Q@ٓ؄ A2 (ũ2Rʃp- ^#:I;caDx3˚3q2rd|' zѴQ]u7O'իiô¦$\ >i}`~0r)_ [JsۯNY&qcf^sQ<QJ:o,?uK\os֙kO'WiOzIW"tU4w%Kz.ݧ~I/h|7jp ۢn>s}n{yn.}c8Kmk÷\۞ \ [1|%Pi7 >o)RJd/+rR[Kc~sfN'(^EymM'bD"2`O,js@`!5󁵛z B͉ l)- !"䘒1 ɬcʃJhKavbgnjm2&{-rE9xthj}7;a=휵wL ih@`2Js`4ᠵ(ʡz[br:%]y[,ԧ9 FIm|@]( yt9n2}>E0V@e)\ 8oe _ˣT߻1AG3O!GrVr)LBfA:r ttlBY~kAY![uz`P|p aTh֯'h涅T@tE|J*Uf  a}*|} LD1$$!"*d) V"\JdKude)de%]U !ـ!Vl24vAoYBW+]bp5qї.|vG>kd{Os31_TyOtQzܰ A}U8-Xlr9*+@UFU0H{OZl|.ZU0L ˜Ci3F6SF`Ȇ`'^"J2T;޵fIƱX[Babᚣl2nh:Xؿ>e3/1rn8ͿrĖ$jT  119r:>'*8!raYokң, \߳f^=~*m+|g꽒ǹ\46{.Wβ[9em%Ǫ8 70agý-Н>s)D쓍eTA7mBI @(Ze.tw-_ wX>}_F)k%L\f^‡n/Ua|0\4G[fnZ0f&gN%x}|[^l ;hJ"4z0U-ۢ}ӎ_~oI%^X%uJVQ')e(g:T4o$1oda[%Ye{I.zI^Vz~hpk6njLi+k"8,H%Vנ<\P># c?_66;hBY' jǓ,E^"Dq<^vXƙfy'ȓ<*% d E"e傩L7V̑ Oh"ږ7!ZcMw}(nҏ7l,H'ո?+9u[9q~;~3#X528fܐa&>,30^J& K%Au"Pdy)0t @֒YK̦nD1B!2>=ۂc0Mw%!7҇:?GRWP7n\Ub&)NS"u/"q<6t|ӻ̯>0yMԟ\_4^ܿ>lCTE:-C|HvEz]u*ŚHjs,ID. kmAD{V5gHi)҂g>~f &Ȇ^o]7DHOx5M5u"ڶ;>fNR"3w/4; O@={!0[qH.H#y2mFEn| 8%Ylp\엻 ૭[J=bE=V[l GM$!YHH:Nd_erG{ֶq2"~ҌÙWum5-H;_j̝m~_qor.}SR{ F8ֶpgOw z\_? %Ǎ3QB## >QYOK. (!_݀ Y•PŁnWvf?.oMv۷LۂnR˕%t(F kaPƏgh]척&-E4#Y:Ϟ;yw7jS#rlN}7۞VnqwB%bSr8Eh$,6.FIQ{<)!C`x {&'*em@pĜ`x%$Lc&?(Vok%<;EmOY=6T2\ZP^`}Ũ.V}O Łx.\㹠GD>M|:h13P)Ǭ2r3 hP(ͽ1Iҙ-J9ō8`1wa ka[0 ^И~\SUcZy 꺃jxcާjk˕Ba4lW*ʎWA}|_5T/`4bR@UJTqRBpwH co:|۫-kLBz9ճt QbFcesHȑdjE R3)!I I+PR2pDBJAY&D3LfD9eG'1{q#4B{b?RC2~ _Y{* ңqw23_nZИ3^ . 2JsH-JP$f42+ZȢ4)2ipѳ3.Q K5vrń^k,WVG n`4 5ͪgEC%DhK7gID"# FDRQ 6P4ekH=cR=C*2~_AF760~;<tV9e-10[qhOA.`#]Q*pPoM :ăc`.)F`]Ѷ|;"жER3 X/WpfHƨNQi8kD`DqXr`lzS5[}ڑk$v/qf3uC.*=D8JHDQكMAOf5_=*q9G]*HbRcW LeƊLo&gE(/~qaD:1Y k@԰479aO5`sGD0{F5*Czmp6HF7 rW.[sF?1HGSIt(4@rRW]N*f;>M:vs,&aa||VD?$Pof9lӂ[-$iT|񤔫 wI_<&$wm~=8u6ߵS0*zEh/ժnߦ&* ^\<e%v]تqD18 Ԙ^-^VFlB.g"KnӢ"Jъd+k,XZe/^ PRc@9tt"i@, p!S1oJqTSMF΢`;X+\$JKٚ{} }LCGOx%-7ZRlweF>~jgŀ {_G<1VvV5cR+-9R 3̿dɵ>='fl~f='٩x2Ϟw4pg@c6_|{EEFK.3jpjÍ>ږtRL6MՐe5u母NJ/ ͍'CI' 1B*IfMHQw/_`-LW,‹0}o_Z |.@/ De0680Qs֨`]N$AE,6 m>`eumCkݤ ׵4uw\ޜOg]@uO'cf1[6T0' M?`&Pi,ךtqrF~`e{P!!,$BBXH a!!,$GO)$BBXH F!!,[HBBXH a!!,$uR!!,$¢mBBXH a!!,&#MTBBXb a!!,$‚ZtzV[Ij-#K#Ɂ w$ɵ;RcMjr<|԰' jm퓏^d7N$הc @ЈNu0e}TϢ6-JZޘyut<.dͦU)Eræv=NvbLo&W_SM`4+fgZO8BWߝNk7TXݞh[.ekumP:Mh 5gSGQpzz,ӳpzNe,˩$pr~Re|T57▱չ/3!p-LqYC*Ncʡtڂ>%Rމ{f}OƓ\أyå O=cFD3@cĜE NVF_>_H;>~;tiu}h=MDo=LϫGϏj̓@z~OYozf >9N'/\N?!Xh0!MͥS!M `S++_ J};T-W"Ɖкb*mViC\za{E+> 2,$ڠ=vs/z%5F.rY 6#NX)"ey~d&0R!eUʎaj3j̠!$"﵌FhOFS!Q>0wchLFl4:Iitve7jۥ>~>Bڥ$/WJݙSLH!EϯW޼x%܌m`ӖpN MZLNR߫)4 OtOxruDY :^P~$nbe䍒a1o=9h{\nCpYРWZxĺ^+:6o.&km/9Xpakʭ'6 [k\=FJ]:rj\UNr_1udH#.ˉRGRG/8'\//! ΄"R KCC`”D9sPd(5^pERM()!M3F,B0#HYkԔbA #(H8 ]yg"[sFL,qbvJA8kXMh 4]a~~zl3Wz޷Tb\?n\4CNR4T Xc J" )tHJyDDb9a8AHpєpQrȒ0(@GY'HZ e !z0`"{&CD{RRS";ŜAk˝9#H*l4K@bN< eLJOoQڮl+xmnaߙ|eCS;GAi %VԠLrRF81793.'/V^S,fy3`@M\QD#`(AS%c< wb>2gol9>y:X=adU!}&)c!$!KytR\԰!h}6X/JߣEΎ9CC,P{e7؅od&È'n m;͸"H &N6Gmז2f (0d)rTyou 3|3kĈΔc;LF4JMU bHx4k=qpK]wRt狫XEV}i'f էI!k<\l"XFL~j&~O ݷPm CZj~k.{D"ܥ1q8O15S!]QM:A_7'E٧chj~}~1?5tZntuqcOAϞD-703g[|Vw1UÇ& 2A\?kԤϹWM.<#nwɆgl R9sSH7a69CՁԡֱlDVwZHH:Nd_erG{ֶq2"~ҌSWum5-H;_#̝m~_qor.}SR{ F8ֶpgOw z\_? 
%Ǎ3QB## >QYOK.3I\ϡF_݀ YMGnWvk=.9=oMv۷LۂnR@etШ4ikaPƏOl]척&mrj4#S9;Ϟ;yw7jS#rlN}7۞Vnqz~6xWKSr8Eh$,6.FIQ{<aBTځVR@$jMp>b G5LgÀL{C zY  n:?v-$v<_-q +߻wʭ?j%ckqFIg}ʇ,*jm3\d*ɀ$P09J}%x*_u\,y_I`V|}b:y;+eĊUx~V}w`@33_YTaMԕs } 3MՈm~z$[H(; "I8TȘe]I[ւEƸ"(S2nPSF޸`}kB`})yH"d F!bڼ Z?xE`o< *29QK.<͓lY@Siʭc~??kYu'kʱ;y5r}hlhuQ:hG?>W$C`W'Njܦ-$e~Jt(o(-f4/<͝c-wFdBQ(Dg5QX7W;5t$ȓ"kƼ_?QJW9<}NTѿW];x>UY^K_ucn :x6 ұH.\'+_Sat1Xx̷xJ,ub?_0>2HLUQ!w}zIk|sWQ/ssͧC&k"- Z_D 7TO1.: q/?.s zak6ڼ[ [Ax<}aA'i=Z9ҺR6~2shraFvs¸3.(H?e'Bt˅;k3ڄȵn3[qzaQ\񅋲Phftn6qzvXqc VYk|Ue{鈫O:x~OW%&} Ql,99de1]!-((b F`T0(o]Awhy7Rߖ<]%Wm KnnfsoUXOaX@&9ٴ/(BPx"Zw4EGQC;M/5`/xO4?n=qj];7Js5J&Et5Cl {=맿YS3oK>_nWN<ÅLoۤ]oR.fyX~Xy)yەwg6,}wɏWN& ^j/Fe"b Y6oKs4..V"vy:Zyzm_G~ |rtDl&ǟT%apHe[L]B>c)!au;x<,(yX6J~ʻYetRLbJǿ68 %2$%$UBPT."0d]_7e*B0E7@%apČ7͆iT;XW&wo; 8iL}0b$nQ?rZ~yBUtXѪXE{:k! AT'Ռ^2p^m-K//|UUoe6|ߨp&mM6Y%#tgHE6Cfki,ph2ajRS ˢ:[bND2%fa !BۂQ(GĜ^$EI9;'8ph4$)'AZ ~B}-g*^{ ]13- Q0*PYHɖ ("R!CvŹ AM9MABIP)…^+ѣW:|uW !, Q?zrTFں+kW+rx$*rR9nPV)G^j׮c[ˠ.ta4m Yd OWY'u.01)j^D94\sګv>. 4(Usw`̎R5s\+ٝ姍`klVK.,S U^g"!!D`hEc\tفVQHhu<@u(ʣ`ESZRĀhlaWP"Q` ;*`Xc*׹ ~da *M}^b֣O~X/| . !b̛u]t2#[B!2;u^Ji%Ң7L-DfȌ %2*[5)h ׾ sV-(0L"2ΖZWj6aOW0El&ZDllqE3)e]B 6Ԗ@ k\STޣRưI0 %/ۦy'0׆cJT;R YTv12!Nj64Un\l&%E..vjɈ#zRaՂEa/+g*Clѓ"]]܇]<{L:Lغ;s.g|ϑz_2K.ۇQ,j7D?>S#r_яw*\[$YutVuZ[R2WSU\v5?.&=YUit{V͓vx{OPE>AmӺW10 ,]*՞;%,eiKokEcg/(Gs[Bw!01"iK,cT*(A0]R^ShR\j!Ǜ- >z0 (Q }y澯6\F}Q*+}_w}R ^5{y?9Y/E'ɇѻk7͋;d3za#:jςF˻FӳbWf 磼_^N/ [7{:?Atq3m݀/xb}~ɼdT{@cR z5b'q>ɻ# q@fќ1^͐ihlstO_ecrU*1#/δw~:&E7T@8Je8) H3qkD!u[# ݁ ϢB(D1A;,߿_R(MVNǻlljs(cWo,,;YqrߜwY1\7eiAk@, p%LBQ  ˳z ŗ[s}heW+/NdεN7Kw9M^9l Dr468?4FVq4FLBtovaIy}]򔂞`XgrioۍF+ k8|ϵ+6WmEP4S }^ Hi/ܞ,7mj@?O]Ս,՞T3^;c =Mm ]\+mPO2Ht zhEtEOSzkYBh~tE(czt0Bm]-'6n ]Zo:]ϑ<^z۰5tEpf$ɯ=J'#]&G;=Ke BW7@WWn\wmM$(J3k/??iosX9LI:;0ᶏ\6%rc_KO B -N8 'Mϭw::g,n.>AG/7((OGWw8r*LuSR|7676¼FfzuԎ>Ǔ ^f> ϯu 㚪& "/@$(oϫe"f5^ﻱʵ+oRr1g_lr$??|[R&PUUItMVV8-rgq+2iB[{}t I=v}WЮ =nm"W EnapMr'YM ={z69&UVdLj#mTjcNm\?)5f/12EMYE[JՊ_iUEumܷJ0'(mZdjڤТ-)$8: BOk:"ڈ*`hX\*%6 Bb1C6WKr5j&"zֺXt):Q} l5ST:8S׾"27% >{(.I҅zZą %DFZ$ZkCuYviF*UFߚ y _V 5ȤuՅ@vTm^ ,:?m('hKP-"VwV4bM=A%DB>hAjy1k&! %:]%@_>qLGu} $$LES{XRr*a3m>%tB;뒄 X}t kR([|PS` &+{Nw=/Ф td!.h]i6 3+I@d(&J+1A2~ȃR!*8wG{gUEת7E¬2,,HcF8&gو.PBЌDUb|؀@fHgY g#MGhtf%ݢZMnU]},e4 %0l辠{tВ&ƐZ*mm`=v3͋%@}\m=iOsu&VU ԭwH7ۭIFOB=V`&MӿNd2 Ú{)DzIhΓDCakBo1& P^$հ-g' 4 IE!9)j~@'ڪ)-hgMJT%[cA;(Xf4Ш3) @)w!nk[ƮXB_PD4cҠOn F Eɟu7(bp0) -*1"@j裨tvyT,+~x$mDV\(Z=4)hcVjtҢǚA&EJIxɺyfj)څ'еuZ;z*DES$f ScUwU|AiYVT@; Z6i =W&Ex4CSQ3m2KFD9k9cm\4*w3NH`PrAuƒf5$Z!t$4us \- H_F]~B:CwW4"흩y0B.8! z-~}ؗkqry 4yeC&A[yl|ʶddj0h:w@P<֭rqm6=մ<&GwƟmhzmHKǸHp86b%lq ppmq EN'o\"!q9^zHxY;F :0.) !Vӹc+g?S}ypC%? G_ zXyV';N0>Mrh8tE2"Ϸ0l~׋eFCo,ڰC٘^PF7{3y;tow.yQ_ݏJf%3F2"Q_ߝ.ޠnc:Z<2)8u:DMx)C^aU "ȑA1w1f7{#CZ.~\Bc,) G@bo,XkMTWu'5w˨c;zwjE>vwt.7u#]d\ɺ{y)Q۸wU*~㯭JQCi}CZV 6^ߦ>Y/ypˁ1)3ZhAd­*d4mvyrzNnvCw.'UƧb26rI xC3>rlQC*!;m#X%gTަkmȲE.ДELIAUlk[IvwgLْMJj1ܶDOn:zX-EÜ %{@'u>% ,$L,q^yuJ#q2Zov}v%)mb*oEA#$BHk-鿇.)[E<#tO+Z}||~XV#1aGw0`ʠ\q2=[bMv. / ]ћ,mh`@cU (i>.)rx=Nr (1z)P%ޟito#M@SDDGGG;-O#Z۞G`i=ŝvGV Y)iq{[_L/k8ZlA&zD%Vuoz鲼C S=*T"V,:r^mcc0Z$n闍 ?tΒ&Q/_^7cћܠw0M<{ߏ)+>Y y[|fj=ƺVz7OgxXhidl4r^iN (XI3hN9xJ&^)eT#4*H"#ҼXh4/•:͋h5kEFt5/I16"8"r ]!ZYR.8МlDQ{Nb2yP ֤s 7LeeqV*/.n;N*i%_0CT* o=2!o@o@Hdi[)TҲzL4}}3M~W;R~5H5+L,R *v(J5qQDt%e4B,HWM+L#RW7uhl;]!J. 
JDWXxҌGfDt(ML"yTtU (UDO8O GmQ7CWfӫ>[+fFp_]fh6Cٶ̠j@WCJF+ ڴ )AH0{Z5DDCXh*jNӈ/!JҺ7&kTs-x}j.UsjQ.C#ֈ`U58E%ݵެiݲvk:K#X3(D" 5mPމ \Z&TDt9LJETBW%7]] ] Ŕ6,BBWVvB ]"]I%))C~e#G!4BEȽDR!#+n:,hϵx3ZtTT:TR$éU%N%i|p*VsQ|T;e2t D#.&hhTCj(p&dzęuBW\Sl~] ]I+ѕ)Ut(JY#UBW +C411U[m;]!\Mњ֫+@YЕ%BDB:P;BWְϊuoLͦ7}B wf3̱fh5JCEW]mzJ)i3k?]!\ ]XZOW뎮.B):d(m_1c8]ڠhcZ}SM9iԜZTV\nNˏ-jֲw-cQn[y3G#3#Zv1̨ ub0J x J"b+DZOW.#DWXh * ]!ZNWRҕd\p]!`+u,tiQvBs/?MDt%D<gў,f(J3KeL xeBWD]"sOWR ]!\Mfъ;mtt*te$P;"B6%7]}3tek6n^YMWOU3̋=TP Ҳؕm@WC JZpHS3DCW\ ]!Zz*P.gekޣf(>#D[ٻfAm8x߇'#_?,ʹ&"ߙyO\Hmue%mtOٯŇZ~OӀ|r;Y-h6@J>߆w64vAeΎJv *^WK7 "0hߎWvfe%؟B l{㦘l>J1|)nj{ܼκYF@暉̦ j9A,h< $і)u:w 3?:T#xNLfrТs. \B1d, !D+p ]בNۑ ENޟmCpYʤ ԷiMH.3mYm`$cSf)N}82{ 0MF맻c^*ڲ_e}li?n|qrW.0X4,Bv;oz0Γ$jM݃bV;:[N˄jЅ10"BQFZϦ#Z")7=7OkZyIi Bn>/gxo&N4-MHI{+X"h(_KMh˰w;q)gN?ḐygR}‹3‹^>}w\`irS;:ܵ-vȍ{lu8WEE q|ph+3^'ꡑK}jôa`,?jAcg}XtX0wc[k Nwb B7&+‹eL70)j:ݨL Тg*+v~Wd'x9^׸jeX~QUXRI@ d5y?}.4Êګɂ1~ 2#5Ƭp;΁6\X+ 7Zbh>o{{:0|jnvYtZ7$[۫g 'vh$苊7/\,F Oc&S%_8"= <Ҍ B ߉&)zmք #ٽis=OSnpWUh=lqHo= SlSR*M87xI*g0\.|`",ogGoeMĖDj?o`~;ê&grvLȹ왳kL=NP\ԔR+ Us`>-xdu|C)*jPT, (Z/a#Ќr܄rZfaQɲ])k0!CA9b>2T n|:\hm8_#dfƺf yl?]L7=|øf@Y>4_w殫.w=4CR UvYN!^7@9)߲Zg"ˍVTPrIJ-Aޥ26SA6P#MoǾ40D}3(/C{lN>M'{ bVxvH{yHKZ-|}u{ _m׼GfOLvk":΁j>DA2bCerjw/ɞJg:˃7`^3Var'ai.S$$SJT,۷w`U"7mK$vdp"YfKn7;mߜɿ\jv Xxc@ޛڗ<$v dܸ^jLp<|>܊åGd@'4 :W$&ͲRJ;/2/x|A!]>`vi1˒ZU^o.$.]bI)+de13#琌S^l^Ef~~ 7v*-.YT]k-9++w h0H8@(/lSALX}ҾDZLIܭ;=A5&?>pżn~s6xMl)a[x4La,/>zxU0\1B$P ˙-Yq a@elwԃr5pq%qsKc=#q˃ҟ[1EQ}lrF8ČXEԢ MBC8q[>[^~8]˵ze{Ճy|B~{:zY6e%*!IJcbIj4b ~.!nP*u~g aI&l~ϬN=#uW\omsYu?2qg(k#/MUo6l8IpfKn@$@AE;{`$8MN*iɦD”lrs%@jU䍫(t%+8T,O%EFp: )$3*m(YCE^ٍR4}zAsAgZٸ39(tTiw&J0ږI/8dx V+1皴pEJ #:MW*0(F*!LjZ U*'CYc_CjUQGMF*T,Yy-#`uɡvz^Wc~>z[&ZXL1pUT`KUE`aUImrmB~kzBp/-֍(ÓAt3M2iOٛ iyÔuJA֢! 2fJ^%R8mVyD'Bhtju,OxmBTQ쬶mh\lH:+bm pVTU&Rt6AU͑G#ejunٍ,%Bw"6ũSG_SbK1b ih.2/B^p6a<~;N @ol l Ӕ~ZoitsDKƏuR)WZ!{OYQh B rsI-d Gk[L(٪#9 rf`jPc`FGQΨ[wCZ}%"#ruL7'YvXu%"VAcEB4Ѫ"֭,8ʩmpYH8 icԢ͑HkgG֝ݎZ59J`xz jVxXx6*Qx`{>aBB׷6w/DzS/:J_֦a_JHר{'ocfcbq ̚W}9媭GT_\cT|$䓷SЊAW TKJugwݰJX2s,S,/@ޞîtrO; œx{O S"aAGfUW~ bKHnz%liCm[Gi/QaǙ}BP-(s&WQZ ]2lC$&Xr|aGn#oGDdډ<iEK."wץ\@ Ũ쌕q$M47lNH)[#jaFWŒGUZkێUl*`jQjкz S`o9HWymT34|1M/1Q [Jv^՝=P2.)/fk@v~o>]E*_m\h,etyCTOM@ڞOZ=&wh0d f«υW渕fʣ8C):Lÿ\w%}*}?q]Om>^p2X)#=kre`_j~t짷K|_V N,X@^$AP=䡫w 9ˇ|Xvs7DƽElXEoŧ9Y^̯_rS>3tu>[߷''˺6Bt"mKCެDŧ^^]||u+z;rZ&#Ht8p$5Xjm<ؿ-$wZS^QT*(M.>( {gJbH$9 „ gbBIV7gOxyФ%G .a'-5p1UeTV5h!e&'hlf =z ]Ǭ?]FMX*-B\!nWB+2eBȬ@2`H[j&;6 -Ͻ)lߌU>GbDuΊ>k*Gr_!Q/Tj 'Ģ<ȹ O9= q7 9;tgЯqӬ )mNח&âɰ˯ mzC´7_qR֥r?0X1ZfiݼؑB:p~@Tv ѡEoD=ex,zF\ػE|H#i((9OL^h22nO^?eUXIl>ߙ$׍ㄎ ydN/|fC8R<;;@L6`b7hMS( qPU~GTD c ~*H'DT: b,VVFzI#QA_Rq-׾k(4O8 m7>2/%`]+X&{dfOyM qwCd7ie/'ߒ\e0>ˋ(I[ב/:~_OҮ 2&X@!/b|zϦW|7s|Ê\'ɾ?'f(Ҷyjt3tm\-9yC7>7 7&}kpi*;pݞc@=?X+-cJwFױ;Z6<H F!VRcX8CR`s8oĠR+\]UJ`M.R+7Z+}=#9P:ʙ J56mt-sd_)"Z6W; QO@ o7u}7>|omr]w$b,Um ^|>oxGޟuR =ᐅQ,M-蘺zZër"Bgf±&jЙ)mZێ/?vW_6})?_]=]DW Tœ6RkϥXV NXg"Q"BM+Y1dP=#X**}o#7J8ۑMX|10MArnAm]d+ɞWݒeew˲մzx8LES?<;t8/lD@F> 0L{KS Pf2J,xkr  )yvC6|/qgrDrO"-23 L IȭRmX.yiڣwoևN5-6ٶ. 
6(n=-p^]@;psç~goN,+U*U?Yڲ>3HKP@PUJTG+B-s MjגJ2.P\—NӸ,=|l}#q'9 A݃~R6{eEC+wbSRDJqI&ix1bɫ৚WbuRΦxq~Ӌ|rklf\zvϊP0FukM5V{04g;mWojΨVUe[^jSmݔo!m/LUWjH~{ 9oKdךozSB;mukvr.uAщ#~˝~3{fqR?)եjqZֿx|`ڙ_}࣍yg=Cy{ANnTm&TmDڍ"]vS6#j_,g ܢd5>W4{(OgzU2d2/&`=ֿ_"D"MMҷk[ #4v=kkysnoW֝ezh%ԉx}Zn5/3޸X4?]C/g5'kݽz_8p54Kͅ/,@Zmlmw]Sߴ 4Syut~nsD綳ەZa}3r|vsux|iQfL 2҂(+蛲B`%FHR"Nޅ{ߞ<vuwX hԓ|ŴȂ΍`Xs9r<>o\ RJ% n-0/F e :qxԭOڭ]{VE|Žsa{:Bbg쒊xWy5 #js=Z̮&IGO/vi L;IeQU{2O/lʿ0O/ٴ^_TP]#IIdz6:SIX:SS_~4l0Ӻ5Y9;zN9ۼS8«Cf>όk/*H/JrU?_]2u\3lX6߱ioOrR?#,vQ\Q UkUҸwQ-{f!3R ԛOr;DӲȕv{),c* "վBű!`Q+X䢀Xpj;X%€/+>K\C4bkzw\J':eW,xnrZ)cE;Xpu2N{ MP`<5&\Z'+V釵CDD"ƃ+-ԢٞW+H0J XW~2*wĕZcLoI0*W,WXpjm[Y WJzy,ߑM04Ŏraϸɺl2(;Jڶ%}3%N5-6oU/q=r8}9*[=/"ׇjx<6 om ZR#,|jpT؀hyi|,+MYEd`"jj=% `  ƃ+ w\J\"kU?6H$~ɧds#PQ# ᷐_/**WRZ,Jt܀gGW?_qvt#qܢ63ѽn=6'ݷ6D_x~MG7rqTG ֺErw /;M||8j:xNų2Pz`ȭSДǿ-?Zu;Zˎ Wh^[+UWqԄd:Nrš%JU ~\JcrV%_ (GJs_'jrevjiMG;z|_wvSlاz-.S:T#jOvT M5\\`e.%5oYez\)}U7Fł+Vu{R \ ,o\kd1 X-޺b:@\9iI+"zr!+VX+ViqHF Xf2j}"H%aJzu,'nݾqIzViJdPupmK: v2\\Wv_>T 2pV=FR'Jjq 4 0^n!|&8U9amSNUfaSa:ղCphSMP舌a 1rfn ʾW1X+XWcq pWW,hpr=Ƃ+Rk;X%W+#tL d䢎Ww\J&+hHh^ZdUpu W,a4"j{OY%kW+A,N}}Hy?TkW+V\`ϛA Ƃ+V ߄*z8nq߸&ّڗڰ WTM`նU/h" ޻#C7vdZ+Vĕ=StI7Sz4XzgM,N9k;MzKZJ;hzKSu!4bڳHw=XR&xte4stVeߍ^Vi^0@#[Db?s Z 3ZU*pu҆:H0J X#wSkbV :@\q4$^–3db \ 8ZjT/lYg2XEXpj}qE*W+kKW,`4bĂ+Vuqe :@\9ĴN5r1\Z+RB :@\yń+,!v],buvp[V>$_!&⪓Z#d]uSzf]zNAХB*_8FǙ :Y -c:@n驮-1hVZ%@KyED0 Dc }G+V۾ìr-d` 1tF+EL5(k'U_?Uܲ{K'ulzST]}wW:LtfpzuN >\Sfd4_LgXVZ*kK@iшkyճҫ4/>oxjdN;pU]zCu/wi9 I$7noc e/5~A Ux( ](y#E%!uF^:{oHA:"ܜLэ>#P9H AcB6ͲBBҊ¤?W [$Jpao4%( (Aay kXMm0Ui)2i3:99T jZd K64j 9C( +r /RQ6 oK e!2K=3c3e6u= 9^\\_I-%b^d -QH).T$K,) V{ RA{Դ{Zm) ATRFRk&x*rUEO:2T[Xß=C-Z'ת{'-bCeaAϔ2+,GPkR]koG+nڪC@0X kl/AAխ) }OEjI:dfV}SU[%%sn*dȿO02U 4TZ Ql>fgI`uA裃"GpG E4Lڧ]]Տ,"FNz$Y2d]DrUod|Qd*^܌S%Q[+DE)$ F[4h!ɵ(sRBR&}d SC6nF);IM 5ZJ!!QQ~B_+ H1k#U>).JHJm<𥁇D2TeJŌxNI"i"(>ICD ɖ%ܔ42f;oLeXgM(TGoOU7_*KɲnGdjɣ^f| D *1c5.d)5>9B|{Tд+G{mx2AA۴I|txM㵿(yi<olJ#KLAG= %WZH Jg0wS@yCs ~XA̤ƂBgsP$2dʫQ YZ0qcM|Oy }Y%@ VSx^m WX:?)@JTs F*ΊjGVBd*8x*dž[|9!]@*! 
-Q2O&PG]Jrm^Ls@ Q%|̡aLBP-fx  BoL)ڽXRr*a3-EΚ;@r0]Zx dɗ ;f-D;)f brecyp ( gIOGϱk?n,L$fJ2RQL ` ZA"78 8\-Jb, °l" wUϲMmŢ Y5[!7tXgH'Ytg& hH `f:͂ RL/Qr h ߤt #U}e0lmryXq{yo9ZhZ ="͇2d:3I?ҳhd0u 6trL'Fc0`mӿ\; eVۊnMZk QF D t#"BkqzҠDT-]*P22 Q3ES'2(X 7mdl"+Oۉ[QWTb,M%r,J`X0p0) :-"p#XNF"ÎW=U4+~(]:YQ1(jS{[NkBfaƑZ_XV=X(-S5e2 `2Rf*v!;tsrghAV Q)SS_xZ$SbmEѫ8dm ' RStija3^\T_ Lčr(KD9)T5]@=uXm(TtqHth .zul*)FlKC&"ˁ2:ff5d:$0xSt\hI5@m,?u+zgJp!GTQ~իo~і{qrnUOAi-#5 yd<[>Qekd Hǐ}8+Pj[H`7[]v)}Gj~\mpRb}z?~2W8Yָlu|t$-GG~ͿmˋR.Ip{++:#mu=LqzxCu0(J#T蟓8Q>_l@'γ%**a%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V*Y9^asy6J XkWuc%KTҬb%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V\%PR?% (WB?y%@/P $J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%KUIᄒH (`@Zퟺ[i+^/8V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+^賭^;cuEj[^_76Mv#D $\1<ᒊ8lK֊'/\^p%>z'CPnuvm^;W" |xA5Tm QLV =/f?7z{*ZAAbLk188bUZ|e/ >^,k8ȶn00-Pktuz1]SԗZ %!e/1&՟܂+IHCo+VL}= y{DQL~]BZȽnaA=_ofw'ggWC2J!t+T#Ck Xy=P|DZ[5J) ãp?ȱx+)A֢ [3P T.=EQk`fxej^][,(sSI* Z M6fmL"A, M.\֕BZu=7@Swц]fэG%E '=]㋓W \LMzZIk햠߿H6fY6]I_r:/piDrjyDuxx>x~qjZvS{s?dX/ƩCծAJTk"!M9%I-5ɖ{;7応ژ~asq)'zɩ*bC;nWK箮b}}`^;?og6\/N?]xo9}+>j#zϑjCk\#|\l|O>]V$VG :[^mkp ʰR~, #]N.~S]}[Ve6 yje"CKV*'2Y8x' Ȼ }f?<d0Q2ug~ ۢA>lSNpjJjioGr\ٛ8>)!]n9@%i  BgٵRB!DSQIݥh@lr4eaIFd7⬧aq|\fϿ׶z'&z-]}-7m:쯓[~r7'n|^mWp W0BR9ryVMUvl ο e AjsqI= R-niF=5nl[ e#XA3J" Y)kY>5ans]?[;˫]TW 7|Rnͣ_l2&WdzR4UO$~Ws1SlJB7| J/0NG];PẆz[2qtz}fu\t|FǜeCX[~! `[dZ\e@[kShu+e hk7q|ONEI?|w0ͧٻ6.`$$e5~Jb& "Iu _?+_?6͹;NohY?`7$Z^n؛IC=~]c| :``l{7"óp&KRll6e, m8iINpgBw H6itqsz5xr>.D#D͛2ey"QSs{DzϨE|χ3Z8_cg*χ@ dS5XP"XNcHo 0[f[~x'"юJ[%+#K]P@Dʪ(bG^ Z֞NZ ^ 9)ǩҿF Iz|%{<'M"Q gyy%¥;gsg,ͯ'Ozu~TL&6 rC2,ߏ&ô#sr>(bwD=i b:tO6WD!hYlim&WYUYs.Bw9;Ox@$V ʬw;_Xb q8c .#W[J'p_pJP혷R/np 6 yrzφ0S]pep."zF |Bwѧvhp n $?K޺|j̣WXp8-KOVF֚BQb1WbZc˗:!E;z)Od^b9GӰMU=f}^:oM}4 ;95j[1 Yj)-={sz~Y6]GkB9sr0Y2d`c+DUR𠢷XB#<:${T fLW2a?O}@BhX0J*(RNQ`]:ZԦg̜N:Qs&0%ELT.r!cK9dkHpC"l:Z F'( Ds_k녥!RcJIsA/9ˣTѾuczeLf`cAg YU"P /@cT IKӱұi Xc~kt"EUvgP|%w:eh֯_j)ŠT"'VyF]kڐͦavMNiS(r`L00ɉij'"*$VN &E"\J8.p뀰e)Ⲡ¢0@= k˴҄5Mg?6+h7 W y/Gpgn.@? bl=?&|)MQm~T.6 0TM9YC[u*rUX GTeQL4kU':Oh+`:%mɖR3:(z-hRˣzrV0b6>罥B$2Lye,wVz=SYg ٧DCH$o?>E{u85)]{T `?zPb`Pbyіa(@ IfwI! JԼ^k0^ +jWt]#)R~0rf(&'GXg%%RmӻzT@nL^!xL쀢'/} `A]403 !Xh0'(1:*mfH^c`01szR0HɢRY8J2Tid,6BqbXXL2B^ Yϊͯkmofa>? `6[:VTe@ĄɁ x( 9 +(meܕElnxs= a+DF#AXlfC$hSzt#⚉y*]L:Nyݙ<P9k0Q)~هXg2m}j Jb${ˬ-` jT]]Xс' X % 1a-c<0v0~. SCu>`oiRa~l { {Ia< Nvv4IS޿>|Q`[NWݓ&lx 8^OӦ_6[ɟwK6Yg bv03 ֳzژΊߚ;oݻ>Y|4O /2<8MCns WW%c Ćyŵ ~8S ,_hAdz:5{}VӺ5mH^?(Iz t5t|<-H'z8漼fsU,:! (LLo4teNC 8h}BeTs֦_օ#ZeP^̅J'} o6ncb3b>|rI98|Juujn=4`7d4I[}TpvggWٻ9tu~pfg.#Ov}4_+vny;[ϹCd \ gEͿkw(\Z qpNyKn&frmuWr%JI -noFk܇W#cFⶀNw`*h+AѢ`[N,V-uzr:yFRKھɎoL(2O e&q8wQIq惦2 g%=[W7ƬSrQ\Kcś8a8/'}`<7SJtySbe$Uıd4&a&4|Yf4R2XP\*ITς(v,YKd-i6u3HLFJII!2>q{nyEj 1 242G6j͙LI@O_*qg_9FQ "ᓴ \& z"@U&Fd_4)6l.h"vߤz8..(rpz g0&-F(.Y!+ԒӨsMM\Hƕu4]oܰMנ\l#C/œs6g-CrbVm#6 |LRN6d۳mW܏;My|\)6O&V4qd]İ?&:wd^z? +^ĞMxbYra Ta~ɧ*Mg~kA;pnwG(ɓlI{{ݾ㇟ kzaHu!בjugj{ɑ_i7A;Y^m-$!=VKlu-S<xl)v5T^6T uYsm"iumzboserA3&_f[hO94Y%& ; **7gj|)*t~zLt8h.~4L&G;#r8plʈ 5*+BX9»ИOo.r']1tvqhvv3$5Y7Tꕛ#k= 3}2_IєMK{3g5ųy{eDh }=6ʭvtf{plEL ʃ))FBJ&xJJ,p IkRā>?H!E :.+p (ݚ;Jr!hYA۠UT2WF`K(S2J/sOg<.Gۥo[Iֲu;Ή~`(!C2|Z٣B?/fw)D gUTTQz[} JfGM.9Q3veRJm9IBpN%2YJV":ZmDyEצp. 
skDp7t42 on\%@+fY9| x &(WQCP-kC$SY^q+ytv9$|#m[GWo{nKxt Q՜k@[.<ɜ STN"2ϙFt3D9Vs>H+$IE !REc)'Cpb#iDuI`7HpHHyKG&W1Wqi&3VBru%(|/kFrDyS\O7pO!KcJ=J '@KF6;$3H5Hn1=º0$xS!)@>JFƀLPruB;M=&XKŷa"VT,I67ΠAyMR0D'RpGg9=嬖ߏI Xt0jN{YN,n]r FH^;G-k N&G"PRnou=A5h$I*P DA@ R@;QyrA-NZ q %W@`(DdM6$nAIEs+JGqThxbP4(ig62^l I9c(n6%H.kuHV#]bcEϏEȾoU x *\;JGd* cBA<ʉ;t}-:Ayc!w0MLwI $r* a6y`&b2;}A%t.gT""EY9OZWQn`+'ZDtsOgڧyWNj|\0Bup֠ VK!:!hP Ν^*i wAb`[HE)9It`ڤe0 ] Cb ~SIM (Qaժx㳎V##ɾv,^bqǭaOm:zt L]L}敽u_yHYQY-qC " w^kA"ڐ$sH ̫`1纇!$R#X!U:(sbʞ憺1a=36*6)$9'",P!C-To}o}5 {rovoy#< lKh^}ιb6,2ЀtUVP%x嵦VDCzHglzKq %4] Im.[<ȡ[f#3@lAm ?Y ?0Bnݡ_:MnP/F^'DkqϠDm5x0-bMI6?GVmGӅ>Np8x\HJc\dFns׆wc#mv"t%=Zn5`>(~ { [K8?տ?h f>?iO'{,+&C b`!fb`kBGLC !fb`!>;:+nl2Zz6JÁͷx`%`>|rWnڋZR˳"a*OTN8S%*:Ue|0S^+c轢,JM %&25h:x()An47` rIa{a_+}}N=i;1A~?{}Y!Â2jD6˶Y)/Wu|!K]`Ҙ UU4uX0܁Zy/!E+胶\4<dL"|S|.w, ^:%eWmR$v: s[ p$&Q QMfp2.|6Q~ݺ}L/fwuIKExfgw{ˆ[=7-xUeb,Vϙ*m]ԓ颞^\ՐKd'iCrWMmBSY?$|GW8~%/d 7jY'3S{$s({Z idb ʲ%qD>XF,c6Hg3QKV%.Ա %gsH<"Eڅ +8 o,4Z"@'ڢ5=陘9lޔɕ|Mh,c%WQ)"UGV}Neq3bֈeHbiԊ2hL\Smb\ I$B8!(K& rŀRY HP$D⼔B傏*x/:Oldb>b"%Ơz2`RA΁r 8RrtE1rz䜺 [wN]yY% %?lnP㛖lv^3ʊ ,!Zy̒I\ VEup{3SHLf0V3hM""A.H2I DJ R^J@-$SFdneῃ z8NXs{WyqzEvt̟)@A ^Q BY!׎1AAIc}a݆-2 =EԳ@Qi=M*ħ?R %2+W/8}1kJKo(c;fƠ} T#9 ݍw#fq|_n/$qhKx3rP*L\}k)yƟQRSyE2W{v)^ts迏٬L==mkNj-f|_"U+ߔmk6/weI=3L/-o#c]gGߏlr5:X_~rس)3ޞ˦4 VT2v8~>;m#{\4~YF5A;fjn%\={ EK~lY._L{p=_V+yZ >}\gcMp[5&Zw{\/f61nOnZ&o ~m5?̖,~6Nixt҃ZBW _fwWIQLj̹Ͱ'G9`:iDm <0IMF* ezȣZOcB,̗ld es̭YMl)03%7d_oHr4]vo< EÑTL ʣMb$dA$@:E#__N߀9|~`7gPs+\$ ½h2RK\ É\9ЧjjjXu-1>L zs5k=9AoQz w8u>~cCPC>=*bvwZo+mH0F2QBřcf>U}'x|cvcGQ",8rC$מ;TX-4inU*FkW4qmBQkv&^ Dp7t42 on\A^>&ӞWTxm'n>䈡ey BފP$hݠu,n ^hUZ }ȒeW jм2NJ0/-MWC781 uoR^' G\eFY93ʂ{D'QƗL-ߐQzSSvkR׌瓰Iq!CTlwxDeP\hu Q*kY!ds^heU^rS"w"O5*{n5C͕JّTkNv<I~$DbzsAF+c etod &im , 2MJ(哠]TjZulǔeZ`=/mND&|B"~ӃU3]Wv6#k*}qINX( řEcBMs߻G=XUյZw@e8 :5jKZU $L3{wIK$J2!O6yd!QO^>xNFŧaR- R8k' x#bf6ŕ(7(V>o,F>3Jƴn<6&~Vvc2 &'/53)Y{u]ju\AK?|g%8 kIZLȊ܄qZ%0ʕ2!E" xvAR֭|rU'ETqmPQ8r+ͲW;?2~O#ڶ: eP$qC|ed!{U=3DH;o]wghq7mΆ<w77Xf go ~.ѹ hXxD=xrtxXg,"m7Ig JBS g]D\gq/_&"ZrYq9w%1(ύ9 PE&]J!ΈfA׍hk+T`8HD3(5g;f]h(rGUuy E~@nyйYk+[P i w{ic#[$ XNlxgU`wI3#Ig2(ȩr`Tkg︨q2!m߬l{7mdyG\GYZ%G*)*\ F5Q% _BliLǹa̹خm ;#ŖwG&E' Q 33<ڥV2w[V3_Br"b{[cn}ggÏO^fnuTbb&HcFFA'`%{ Ld:f+5O96 V8 [u_t䊰s>v}Us^믯Yz`VoR˃֨xYS 4."}"撷 K|Tk!{P%>sR0Zf f/SFD^*(R>qORxn8|ܥXP-.3^TL ."*e$ϭLiɋ6Y٩…?ѳ/jՁ-SNJ|?Z5=8?D`7]3I `.|ߵ?.fE_ w/20:C0|wH޴pL1 c /V8v8ti:錆4bQ,9$b %:0m斷=K~aJ NѬ5 ޭƟ.ۅDŢ/\;T{MN+ ַka _Fvk+ d7{k*Wd(N:.9e7ELhTL9ьяbQâU*Hц[+^q~(}Ql18Tw<۟| ԡ, 3mmR?+WI_L㮩/ehF%S,IRAMD{QВa% yţ.6MҗGWsi|¤RV^RMTƜbb.U&Q5N*2Wgu%`Mhfl|80ayh?U"n@KP@S"fg̕`{@`eژ`1# #bYRh0 ZU:p 傰.E%1 QH={M)DLډ՚s?O$nn w%ƷڹNhZ+MlYw>M>zLSEC|j\X6att棌C[u*2:Cl1iT#jCR{ޱNG)iMc<#htP̽ӖI[m"#z~5ƥdÌ7pȅyo9bYVg.r.{g+AUf՚O4T>W?_/yb˚!+(k 1fGp=$ R%&dByy@!l}|bYzr B c(R2z)x՚ERY-%OGnrWɎDz;Ǽw'6֐wҴ7V |RH ^hڳhh` ceXDcbKA0SFHGYp I#uqO{gt:E/92Tmd֜W,cW,z,|T,\l^1nc%prr";ۡqt:[:@+}P`1<9 :>3OCNh%!' Q-HP= A@M( &pF)cmYC,hSUG܏~Ptf9h<\#1.N-X[)@.x#9R\V"EA$MX R%J>:sI՚s?ARܧ b52"b="ޙ CK2tB."$XKc֢J$("E*tXSEG]n\D QLIj4>N֜-R}H=Y8>uV]qQUEbw[A)R<QHdN*2,0*P Nfuլc_P톇aLq޿Ni]g~DL{9}]>TIԀMa< O Ŏ_tzeoy(Ǔ5px?-YIzXsA"'^qv4jxznx\v]z)c X \c+\:\%*%•l+x6Ňn gyU,{+t/ 3N_yMdzyS9ũ~sLr,3BϨ>5ђbƅ^zq+V]yݳŒ^xg׫ 2w\_Vƒڭ k^>T;L)c\8?g`40?0q1ɫy`=a0oF. 
P1='LSy,,M m^eŧdߪZp*q%(Ȅ'<3tny$<{ V*cAUncR#)6gF\s6;˜D#R`(É\EN*ue8Q{%*T c&h*Q:o'*5c+ #t4p%X*Q;]%*18e'I*%b61"%, >IZ/~*)X =_X>2>cTjn>"gMDFNr3udGk4Rֿ[U=+[9X\yH4}/o~'S,NN< G#@GSy e5hsčс M$2AI$ \H I\'`v~'I#oYk}pk\6s\qsG33pyWPaYW밠(`%ZkK԰rO5E-;Ձ c&nT=;[j.ߟ=k_?y5p̪fj$3x-|1fYu"=]a&ny.rYVY-uAsP9(I>E>E>AsP9(zYsP9(:B<1܁+ZwQqZ.H!LR6*i+dN$W9`Þ%TIJqjҎ;5I眚;؁܇;Ѳ[z0PrE pCxļ\ mII$j+B9DŽRnȹȰ, 8Z6[ HZF% t~;?=Ǥp5qK"`bJ)\LGQa"NjxDXx\ ]CQa*nY,GGÑً7 028{ >XyBRCE?ksfӁ+roxG;T6F 4IsmϽ KM0{^$Gna0%IMg2aR l݄&6{慞;b?J[sjBH-KCOQ̈`J"9o(2/8 Pblrg&&W)udA-ÁmxCY5('OSxNWڦb}]O_1hNR4<t2$(I$DK "Hl9',jHpDH|ߍC _`#K*ˀ|#)C&PK5U`E*L7a0}?]>]6>kױRX_Ou)~?Z\a5C6 m}~ 2v )ĸp_^ƯLSy3lz{=o ټ|fDw' ]Rd ەHH$ݮ˦m-8 n6.{êƓIZŭCqd͌HaA?eP}|0*س h%z9_0n ~r7L)O'Oe< "Fh- 7N!G{M!obIۿ-sIn\(Ne{a{[nUmi hszd~et زȫŮ's"l'ZԄT0)V}0WE2PH{27rx`J}Ҽn0[L[XJhmGK~= 5ׁ8LN=͓M |~ yCdi&$T~(+Gp#`2\W*zh$Ui TzI 턷1A[1g5sCsAߵz`œy]W6цwffV$)Fޕ6#".i3e=3 a,%\LTQ2mҀ/%I2"caA %c: 9kb6J""$]'ŀ&Uv= o/RC8@tSɒEpaP\4:i,sԇzzClZ/[gCעST| ;/^q-C$2AZVOk {W}75w*d o !7}RK>eh;BtD%✳K!6dYJ4:KdΨ8J'Lrg4rz`J}7I)7&?)As?fZxt==j1q?Izpy/23F)RY kC+KHW6xY7?_\ >h3tE.ɵB=ġqw۹|W2 _ie2d5"곽16w~mS/|Q˫yFgv;M0?;k6(eIVvD>g2@ry2Xy_=˜b3G&{:l׬mM{2D> ϟmw͓ƲQiT̞FS;<-'d`*(}[xTfA-LQ2]crCh)DJ,h$!x˘\TC*Նs.2C/wC闫L#Vz)mv+-M9!ˏyfzN1՗JO/(UGG@_ErL1VV K0J0e,'d^Y=V=K^Q4Z 1x*y(ѕCF6\F+c WRs-7Ik? 3٤R> 2UΑ|.ى`=ZR</H$|B~,,*`&﹣wɛ7>5Njo93#o NVd$)p j0.rƜ2Nb>Aҁ$V5fNv;Vj" `2egdO,,VE})lm@ꕸTl581r*G{<>SbDŽ[>ˤ]DζTʊu@yDRIrT^+WDNe>|lǰ4`IyqkP1P*ud(EZ1!h={ck3QBN^;|qˎI6Y;* Hc^(63=X5dR蓉'4Ҕ$YHy95"R 17 2ِ6pW(Dҟ'<9I3,lY_CTDɮFE+=526k/R<րB{rӔ3kO/.[V#%$&uvLb'kT=:zt*m] L7uI#$Z: V5Ѡcd!6,Oh{HHğ D !tH)s.9b0N*ФT9{BJBHu)bKh-f(g ! A' :ij͂GVcGm.{uY jA#4 07m\r{Rsv=jx:TӿǫٻUR r}'aݽmry>Z_4W%|0!:ۓQDDg "z1&霡$7-%FwBYhBvVʶ_ޥ!~^v4.3jWeISv6lM>w{g׫ޏG;vI%}]-q6Gw5Ӣ,Pu=O}D4yV'fl\6nq"m9ue[wn{BPOXT:ԻLUpG)kb7Ҹu`IۆO : :-h\cel@FPߴ.pL2F0P랅)0MA3 (e(;U99TWm8e*^.NM;MnW\"nr ٛ' .ڎ׏@yՕdȱ[]/]ofm;,[;y)2 t8{3=aw/RZ7+WLfj~Q3ڮwoyqU˞ZxÕ~t͓t\}:.],OlzO4_6=6uwzcKߗ,G'| P,u˭_6m|u|M7(Xlcrt࿍]9-QlTkqyx~-:Y)XS{Ui^RixEˀ/Õ굤/5(Ӂ`n=_2<ЍR5EJ+4J ֟19r.sØsd8lcdvLeg2\O/Å+Å*Å(e"PED Q dFty)xk]hq!c[܋HJ\̘2KS}@2ph`<9#T`ƹlj9rv]hH|.>"s27ΗAwjٵ eNE2YLi|\佱>l^Yp jחR.zref^_bJ;xk2_uI'WorYn.9p+ـV*oe!h ̚TL"_\6aU=mP[Hċh1{rHaD$@L0UqNe\*=3el£pu;3僐p΢@M `q`^]mRH["8yxfoP(}ob+doɿ֌I`4n]L0HX V\}>,{zCS%YXMm$͸{IoL|X6k{v0^ ܭ5;f\`H9MI?兿ͻ4*hzB Zy@/. E3?':竻;C$8+7R( 4"Wm^ 6޴g`+n7ƉXg7R+A;4]tFj, mmKIvdB1cɻ,Lpnj* _HYKޮ^. qh[${qgi#Z-|V?צEs[4!~B~%rv8xC/xH^]"myǦAvyp8yeוm^iqB?x)~||W[)EGr!hLN4^p(`\_؃,˖ :Y3K)DR/+~F`d0 #99H&N݊{<d2c+ἕ#=LΔ,8]JaY!ǜbhPced!{s%W"$3o]ob6VߎsMw- 5@o^ȾԼpGx|9VɨI8}tV 4!4g]Dz hM k"%6{# "#~:?hou΂%Tƃk9USE%A?mh$$g~)ɶ(rۦll5տɮ"þ uUvb,ITcuBLzD.ǧ^8Q+x '*% Z OI\ W\OE\%jiUA\q%;zR#͟]\݋`L]ݏ\FW\݋ZQB\{+9c_=XoB`)&:qKAm^a眞#Z;_~|[̧US ~|[ zN9烙ZAǏ[, i @.%TtV⾋D~bZ))7j92TXӯk3W&jeSL'oȺ`ue]m6hjUjK+~_5~3.7> ӔiHY܀6oŧl5 v_:8g~# ZHHɈ6^<Sh(oLJ^R[Jj(>7FtQ2И(ƴ)sHȑdjE R3TAcy+sH+PR2$h m!M$i3 NR厤FaDdh)(-[PdJlU$KHyK2.NUR cZ8GS$˱ SR+2+Zaf[[:;[ ʹƎPμpXW˕MGBT0)^Ӭ̠h$!`Ť%"aDԈ'+TT!U6PiuÊ>aTݛzE9pY唵6jU^F\Utȹu9/#;)@IJ01 akPPlA:8pA-zҠ'ݪ׺\ 餣Ò&ȭ"6K/ 1+2,WuO {pmZ{ADi& c>a6,2uDXjc`uV[>Ж*fz}Y|XŇ3/&Sqa=i*1Ya\ƥҖHL\Jq2,`R9o0ȷ<`tٷެ\r WyFЫEXdwGФuPKqHMD3- Lq#'3Tb=f*Q3 ch'* %Wڕ_,sԦX,Qd )`)!cwA2aоb2Ta<'1N|`Ts fyP0vY4a#A29Pt@SJx->cͬf#C-B*yn,Ty:2 F%Dwԩ,?Lb?ǧyqPGD3f>Ҍa^¼d QSrÜ%Q [Zca"!SA"Ԑ<$RRccc$> IJ 20\)T`P1G >E$uL, Ƹ#aREbo"D4iÐzE)[w8sv RlX 'XP#gBA7<ajhtP.+Ye,:uy? 
he,!]Znu '1b =$԰j:\MMfV~tXv z!u=,$(wp)BSϘX1b"iZ+IobmoƑ>rס: {2l x+)oCAeKh6k|O$W>{xկͯOŁ g.퀆G;ؐvڸ6 p6F(v+Tt3o.7gsXWÊUzڶ[v=Bgu-կRdpx8QZ[\&L=R9qӸqýHW&ٱ c0BP*Izpeqr'2(J2LwZB"^ˈQbhnE tlFac_eY>y>6\n2Jޞfms߲Wyk{yCkXoe\&b:V}&_ޅw=->[hf?ەȽ-R÷wz v1avk,.u5Ժ\=Ldquq}{gZв{NϛƇ;;r??̧=5w+H1wyАoV=q]lj֨ɯY~Ӵ?nM$̑$Cܷ5A7&\K S'SD= Qɇw1@dsJ_<ذإf`X|E;"zA"6{XSTE`⥡!9K aJ"9o(2/8 "L&_F,B0#P)f^jʈ`A #(H8 ]ِԹ5jSzlnl72>@A;k<ӎ_m;;Ėsqr$£HK<#`#K° =:!8GzwN(T@-㬂Kp3"5'b>XZP0l\lt9`Ϝ,zZٮ#pH`QE5=L`؝Lx)O n;'MqOBHA\.@L E#At֘с M$2%D?&gvt3sl.O|[b}y~/2ϳ1 uq\}7[b9vӫtͧWK޸Cՠ ߓ3ɗX9N q1[rut=eQ8/ ~?S_lֹvYߜ\E閭\wP#2MuM##Ć/DCSc:YT+Gӑ<.iir?NU7=~M}ƤPC4WݠY?2b9ruT=󔗟1X Q_ҸgN]]b'?oMH_Lˁn<`CjmWm팶m/UzvNs ?M#moV4ro3Т3u|t\%mqYw/GD@/ g_R$Q1yhY+/ qޙu5>%V\lۜh~Jn_6T {#XhcAТؤovw2~l{^Lsy:=?:nxq60<_+XGjr @֙y /4حQ :akNAPםA=7+6C-<6:&6]ܛwRTDD`96S\sg9O*9q"i5hXtظ%E>Fp'F2)aWƒTO{y>A2Ď@o#s+$I*id+`8ŋWƑ iGɋ {/N8Y-E)%mkOR7"je [bZWbӇ2V uP.vumu;"E Rr ϗȪ̮̰4NgzH_!uIcwiL1=y.kUؤ#h$zS.g64Uoœ%=?k^mroG?"kqiq c<[ qgm\Ef[Yj@~=Ggey& ##?d:WWMՄ[D҉f|ϣ#x޿L&mCW'ļ͗!$ճ7wlU@f)\-Yǎ~:ͧa`:e ]8mȞ; k19,|;ʺ+rg~'+Py2kdl]*3y|:ų#Z0b '&4e~W?f\]uV4a/Jw6s,13|d5yGq-DTY!wmyg5+fu>hf3Z>uxgM{pn |[SƣP B 0'LQHDi +"v:Q綂MJd -aK|f&ĊlVNAʆ,"B%,|͝8*3X3:(5J7q9}b8!C%]0qE4RАwq>F:q˼VW7Lq QCQy#3~/zy}4iZV ei}1)~~LJЁ [tۍ6lDhhSv* DȵNyFqp$X(DBeFqP cP|Ttzt$ә1lѭ?ae ʶ]:N;?:?JFJ䢘Xr̗`-ˢX 2jQHXεA˘qNw} ;Ѵ[}w$_?*h0xqys~;7ol-%V͢Xe:W-c[gm]xSZ&jy5OK6;2BmATQKBUKpg<,Y$kF^]c66EǮa;rk,Gn4iǞKU%6ƪڒK# I]kXbPB`[nǶ$߶%ٖk-ٖkҽmiR l/u`dJJĘAjQ2e|.`[~%־5$NWv^,V^Rݫa~N%0I{GYX 6 ,M(3CںϵE>BTt_bwgnlٮlc;\6)2Ƌ5H(=xV:c׭Eˋ(8\+$s0l0EˊjREXSbqVDVۂQXQym:9 DI9[+D~AX U>'7A"a-u[i9SkZ{LJR{L^l0%S3ޛ(tPCz)Z$e$@IB!R*y…1NIQ}+kH1!,OQm\M f*+cC[|%:#lDIX0'_?:(T-WZn ȤJAUX,=0{d4/WY *P_X)*x dld"\4]ڋv>-֪(Π#w ]4R%s\+ށOiMFنJ& ,]U^f"!b!P{ȪQfJ "0j JZ;BA#:HT>JYa+JSX+H&RLl26h#dz3r,/]UPCmޏh;P?z^>kcP1wb֞.7=ƧM4! R@ɠKm',9oz+rFp6IL}Aθc_P[=h&@&*0'YCd2<b +QC:C6* Y3b+:lkRQc}}Yq̵[$"+huFx ǺH#" 8 0lMdde|@LȘu1EI5Cf#AAt΂Ƶ/ dKřBufEf#[HFz1ĮS:#M0e8mµOθd]\pq{%"4qlZ7:I('N$g(REWEpq/xw p p|ڮz{ 碱mżFApC㎢-Tۊ~\Ͳޢ:p c;"٦`&̲)!4q^&qJ a2=]ޟ4Ұ7۷r*1 q8vQ(Fe5fl;+{YoKc$̵ttGokEc3[C >NΆS)9Z0%F%e)mꂏoGs}SDT>ڒOLݘ0NH-lVMJ}ej }T"}_SoD-7{zv WO#nIԂQQ {֦>WN=4@8A7pU5/p¶uR`WZ cVS͂%~;`uLF]3~[w>B6b4H#!G!藿ztq6mDm5Ւ5׮^jy%q;PѲh')}Z7🗓t 2yb>&]"tv7|c~ar tbfFR9NW 왢_3RuT+Ws^OŞRpNX!Oo^P@H)CCTګ˗l6F 3̮!YV%Sͱm2Pi( \SP2 & ACa&2\u/0bp%*Gp%A?1\Ur \1 ̾UW/>`%Do઒ JpUJTO?\}JtWJ\@ҨSI%ؙP*ֵ7J\@2vU%`_R>ז^\YL 6 {W\׮*v0_"\Z铫 U%zc Vj5;\U*=r݁Nlw$rI8i*iT[{\֝z Z`7pU% \UjwTZ%Um2gJ5 GjkΜ'ZP)k̍R4Ejfjq2[c*<>~5"i'u㭒i\2 bM Ig5̫쐼h$o2ɏzpQM5+nח} eo/ʟUDuD缔'm0]`胵Ry-H*)YQпػFvWqARfp],0vƅ$Jvtߗ*?I۱(өq\/VICQd$BYt?=YHST%oD*D\1r['[EMeWeqͿ嵡*]ތ/+ї<xվAn4}4?w o{wɖUh=NdN}7RaVA{ҡ}-+[>RiP=Zjjj%桥GC'``\#rpԅƺu5 Պ?H(TS[@Vto6%3ChbA9&pJ[J`+E JqRY_C^}ƤM C 935'62 3YHe!/uOfY MْjLtͿأ¯T-`Q%9O)xaKݭ_v3wO凓- :SA`fa+^/Y'"t$7Bd%Y Zjw|5]/42Ը3wu]^ڰm))Y!Gt$PJdȁ ![mvZQP{Cyd]_ǃ2ֽf - )edͿE2` Běc{F"_郶?д!zIjae"Fm%W(ЂEcR [#T9IrZ& ֐TE0^E%ՠdb{:X-)Zl邬ٻR\,HNke??r.cZ8iM?y\I |Tu?wN x߮f~ߋAO_gkG_/Q#KYQcO.NIs=@]!f)xSqbKءB=IT&Ev!eȘ4fAKp11CxxՏr`9۠e alb}%VG4.f]^ѣN<%S! 
&i\=R}N?5Ӊ[HM>iAga6v'{=.a-/iL<;7]S 6P!e ״ j%UqݫYFz)}]RR $Sfen \}: i?GFEZzאd>$L2_톜;9x|rہ1Dz)'>_)6[;' EBgPȟ74G-f;(G =*wv7s *("a18Y'=BqFq$ d ɀzgPD(&"\IVheMP7QȔ*dHq8U.NN>oxͮv;YϓY<0?|UZ.i{vOfޘ(J&zIot*ʊ"vnVmb 3,,L8٬H|A#B|;7ZhVLjE\N|&UIxqe,0s1NJ@`pf>q]<,hlxq;lʗ iL 0:X,IEڒ?*j`ֲQk|]ʡ0-RyґI7Ə3):1 p~z(O?/xrg7=[H'SYC >|(2SlQlUD^IDTlLlb4$ I5%K7)^&X\R 2FU-8-x#:&;>0rVeZz#N\^S^10bJ ݞY^~tk)W˰ʢVAw5ױi-+tBЁV<[;ԃ%rYfz9q`Ggqqe>XސDC.Irg}MB0;0L06`\z y}^oN&N'rHMM3וma9KOX-w}!ʤnG߫-ɖqX2h;@BO.h:ԁ4v(8ܬx:厇$˛(8\+f9"C.vcX谤H29mK UPŤNxb'@9^&P #?(XșI֓ ٍRr|i9Sk" < mCVSZ:TDXGTZ0 V R r<˜4ֶd JR%xZXJ!F2Hմ6 Ի`$] *:5 %b&!2R9!zӠsJy]kncQ)Eh,aYꑃ<\e40R2x$ƎƮs^6UQ!R@}EX`V5:XcRZ :۩ƶ-6?*ׂrP+ZAɪJ!-DBCn\nm)TR䒨mwK.;eQpFM,B4Jke1&baY@֚IF,?"ShQ:-80{ V|A-7x/!}[V;2|c`/Ĉڗ .qعCi7@:)uq39djoZ<ub/%"9$Z`R)i0c-ɑo(ҶZ r('\tLB^d X.$CMm:˽.`(޺-~*_D\Gi> 6*}:O*PLZchK3v1-Mq?Y?Qf.+SFJ3h("2ymlmCw K?IFYfzr`uTҢOw2ַ32ʮ|N3m;Tt[gkwlYM$^\L=Y脨I8GDA_q`h[.:,D%FLAQR%gP{JK) Ngl&ͰJ3_L3/T/|R_"-KsߦiQkßЅ|:= .+cݐW eL:XDdUS {"`IC[.@JVmj,*OX 1 XbJ11;Rm^JF(QW7L1fȌ %0N:#"YkrjoȤDbR1.L'R1GlzDhaG7i&9TA Q)RjGo)iAkk%XA1 "/ۮN aR*DA!.%f PTLW46g&g_ud8uLfZr_-@g>(5Tm*,+}15@h|!qO6ӎc0.l`ߦ|я;b5׸(%w' ُ/Q;Tُw (f^Z,)*ʺ+Ek:l z% Y?!{?g%k}̕o_lt3ANGlJ;5l>] <=CUW\=M`G֏_zW oXy".miVz2mr =&bdb$P@^YxIR KڛQ%fbr,Ezٓru>r zd}j뽙}v0/Ca@:Ӝׇۼ\@ tuwт8ut*7MPgUDK >&FOQ M %*9[U[冨)k3?q5EAR)p)X ;$SfV)e/In~Hp/g'\`+1Q K5vrń\k,WVG n`5 5mU JB^V,囑DF4Q!3 $rŐ*ŽN[i(g\yHbχ3mcTPz(`qcE9꠳)k6jU^F| rzhS W@eDJ@@?N 0Bc 8HUT-7vRo'=bdnRH'DL ё[EEl^ Em 1+H遣`r\{FTZٷW][n!'Q՚0aaɨS:f&0)'czikDh*>`ȾrĪ_0VmlqJZ"U0Tx\8,`R9o0x'Ke!JiY:K<ՙWhMEW,qpHMلbѥz@0a3\XW=&j9zJz/# ҺobYT>AΕe.2s9g؆4<0smC<V0>c`@^~^s;HdӞ{֊@U*7Z0>W {FɈf^T*Cz ́Rz\Tknl-"ybeY^d,e9^HA]% 0t[ hbzͱ7ȘS"]a#~p㋋()H+Pʥ}g`XBb]BQ+8f^ջpiV2h4IuńC?(tW +JHZ7Qt yGgg4wʐ *fcͳ[9:9ZQe*(,{!=M&x "Kv*0`?/>*诒 niU҂5y_*?~'g:W:~'Oְ& S̋Z+}'{ŷyfr]qΧ_miX07YPMڮ YkK*I;ڞFb>&?d*e&wrሟ`z sZD) ,o+L}AX yl{<=0rΜ|w.U7{c|0xRUP-}TOJ:?Y+[a3tny$i9[SCBQ:=ʝ7\ P*CϘѠ"Z 1b"i`PKwиi?rG2u8|yesh6O'P8;ga golZrmލ>ƟF_72TCP~t홇 8]o 3}_K'%K\JVC~`U'R &}|U!?gsт7uθy2sϴ؈ uayIxrXaW&cEaT&5F.Umg]  +Eݼ%2ZG*Ezx0k5f@{-#hFs+Rwˇ}0~eС۽x㖒*L6{G.ܼjhJ,%-ISȀ?.&oi,Ρu1:/[os4ef!̡fnMﭞWnsv~[[|@Z σ, gr\>zG՝sfam[i`.Vm^On?>+,6"ۜ#E/41դ[H\FJا,uk\Nr3ud ]SmKY73 I 0L;YSsɄ"RK0А`0%pΜ7JGTJlVDӖs!?X'&g~Ŧ \҆ӄA|h]k|sRw{mRIW.ñ@% UQ:T10`-##ƒ BKȒ0@GY'nzwN(T@-VKp3"5KI ONDpC@,sԀbc6mij`=3'Ғ^@w Ng n( ;)JǕ jta<+x:L' rR /SR|W4X| G׼gs ")̇KQ(u-sTe2!\}-{ҍ{r9y{z=5̄g0j>U$=01e撘"}6nH91‘t4 4Q4Y؇U&c_bR~!vx6UJ.=11X>N=VF¨iܓGkMur ]Nޘ@ f\# 6ŷo m)w5ouM5uk^S89 i xÁR{Mr;ﳷE6#-)0ɏ#K.lp)r`ΫBqtNQMEmEH5stΗ l>L{3,7`ݝa yDSڥuU}K{shcF,ޔdd=Mtxۗ&WM\_ob?w@1G#-qkտb_|~ >{` uOe\֝pp޺&U-T,B6LP(Q]ΠoSMإd<͗>.vvRj|"lf߳j;glxVM,)VKQRc)H=A&T L*bӟrT>;0 |ZkraW8v $92D G W2H%*SiM-T Wav@/qgAU:n7,'I[twrvƒǪG?0R>b9ըCu%`hgJ&rJ]DB^W2Qy_W֕\v`Pg*KPW*QUR^ \I+ X0JJj%:x*QIpW/ИL \%r \CDeX.U"U"wB:\%*y>}ҧvHRծ5O W;KԲ*Qm{g鱠kיTɔ|tv}_ -nAf˛N@O9=EtUM6"729Y=s SZ`p! 
T` ww:L_"L+ /a[,{63#oǟ9ΌR8cz)dG W9u^g矽&n5!V T|H宅SaTEa62r C͛lq0 *v2fkݏ?|ۨZVYaԯrP"+K`RGSJmΌ6>?Dcr0"R)%=IgD ]Q DbRBBCpf3p *QTW/bw l ȥTtZ.ġU 0GW\iJP ;@+p5O&+uTb!J]%rygήNW)m+ XsJUVC+%½1 l} ֘vDw:\%*ndJp;\F.O WQ+nT*~Xpw+=^~:A*X㢾 ` D|<=.( `2`sջ0iV[4ra&'Mzǯ|xͽ b~!Qm谋z?Q~ꘌ6¼fA颼!lJQ2]\灢K,2`ҟ|xWɉ˵o*͈4lJK߁"kY"Mw_ Ej {.؆li"@jsYjcM61Pᷴ1I@f6vm?lƟFTq/&ST5N-—˙ <:0֜f~:8tl_65tv/i]b,a@@v0Pt<|[{4Ofj@WVvZ8G8֩@m8)*z37epB:ZNb7{-oYm( ˝IT aTn`퀷HmLӄ;N}N\E\K㳀[\~aa$Mp;}dhxfC@$;L0`e-8{o[%l[ARuX<<|""$ߪd 7W|#p냏<,V⬢QG)zC%a/)3 }hh-SibgdfU}et̑'JhϢJ`af54OVt|Bv+piSUYPE H%]W1vdˤ鯵* ?+3%5n%cQȋ+tEQJm9㚱Ty(A5Vݙ&+]^ߣ)؈lD/"6ыhnEm;s"z9 K ]!S> i$N!3+A&#T> .S)"J::C(3+I>{.f7eWW뎮ΐcTҌ |bW5Oo-m+DY}Ps K ]!\s+DkH Q.vute&$'uΆf9զt( 2UN+lH>tpi6 -Wm+D):gˡ+gӳ _wu`MNLW=fŮBHft::)@6eCW\ rvBttut{ 0L+̨{1FWڊ|UT{lK+XXƚ#-l(ie^$V~Sy V-W"ɈqM[j< w\0e] #J:1|bs+ 5"`pʅi=]!Zގ·@]`LC ]!\)r+Du QM]]II(V<BLbfSe; %#]!]TQ$':\Y.thl;]!Jݩs+N%uFtɆ&ل-k29DeGWgHWF d+D> jGVe=)IGWCWV1r,P;•&>v(U_]=_3{bg0DP9]誶uMW[4JXkvnB8VpM#`M#Sψ4Dw4}4m 1}I=wT5`57i2H^˻2S Q9'2ͤI`uap%E!Z.U;Cǵ4'BfCW\ NWҎΐw`+VBr+DI QJҕ4s]` ]!\ ]!ZNWҚΐa "`]!\r+D[Qn}9ҕ6J]!`3"4uhi;]!J!::C2>'g|!\er+Dk[[vte-eLfDW|Ak+@+Xg%]}1t%lzqA$ڝAtpͩCDҶm[&իЕ s]!`+++(ͅ-m+D)iGWgHWɈm=} |AI =mۑիЕ,#RTG "uhm+@i4 JS#tN+,E> }pEWVDSWHW1"EFt-5U"Bv8!s+clƦgR0h'xlW5o0Vޢ>[e3(N>(А%Tz=;@\Vxg_z$wCLK/N4Z:ju5W ݚLY6J]\={-tTWn'' F SU7Z݅~P'?a_vսg|P a-U>£fxλ[+9ɭy@ud/x?<--Il5Sz͸Ks~]Rp<[?z[؊0|>}-M~ֶ䧪X7UFoTS ;7E.n)_fI5tC'  u42jHڠ (TCw{ .Oqܛ}  .zoKxדMh?'q? >0lSAT<|E]vU[`udts^/ׅoݴ?άZ<~UG5\]ŌOhbӛ{761•k])4ҬPo߬׌}|Z}P hzȧ1 zֻX^N&zU y RwkXn1KY1@z6)Cwr3407K zKbKKSFl,.{0B S mrvb &~z@_$' pe6SV~&Q9g|[N9?en>{-:p[z׫s ;Iq|>8M3m1 bJkQ[?> ۅ!":^BQ4 zԱz ~2k^ qdYma1:Œ&Q:J+" TzkV#@ ir^ U*j! 4c" 9ZHΔt%⦤R9KNPNsb8yV!Q/X@FL¦yL,&vF võ-H&JYy^7_&CMɞ켑 O1ˠYH*"LrV&"IJJRqYyK%ݹ/kS`o7-w tn6T{Ev͉9Аs%QS7L/ϰߍ"8" !)L}A-.:g#[pA.='1j1>:ŝ KΟ5A ]T8h&(uuiTIMS$ (351J`  Tk]Qk`*QfsEgx&q l18OJ8LK pk\Q:Ftxypr9 mwCn旋{j5E۫t x~ qIN0Z˨;D3ؔ!1o4ڃL鬊먋\88`Hd<3):5c+S%O\=18O:M׏<Ͷ[gy >rf P.2 J׏Zܵ1{wVVă)G\Mv;|ʳ|į<y\GӸg9%W Iɨ90-l /}$-I88W`lWF;-_Ʒݾ`m-zc/[y&|)(%fi$ 8#8KJB/9LKb@d'_oG ctg뉈y|E[?>{u*P+A9+"1^k $QНe,KcKo{'tĝVn Ƶ@9KUt™?>OTIǖvݳ\ƶsY*u?]OZI.]W={+,;ĞIzHTLႀ@X("G[>Cx!-! yq"88ʱd FK S U)LUGEJF8BKel܅F5)QҀH# ^8Sr. IQ2GS$5icpt,l")J1gYt"!x͓o~>`D" Me~w=]4}Gl.6Ag]>~:Hdi6E-yH:CKUP%w #,`4Ļ*꺿YaxQCOKSq!)WS:CetuWM||{ѱNsD1j &YJEJ4':(z-6;mn=6(T6~ bT21C7^P!8-"RՉ%,wVz=SYg1qvuDC}eq|1$=:<94$Frd-$UB$n(ꏞET*9냐zіp$LgI'IԼ5*'j1qOx3kȟMrc9Z(6N؀\C3K>✧j JbIeCT H*PCΞFHb1["q8J%JuԮtblF_x,Xl|싈0"{Dh 2(3 1D6VpBB lk(!*Qdڤ4Kg<`DT) &Ԥi*t1qvHKuU{iNc,6JEUe=.nL"$AE ii }+ q'Caq,x@X ]ܑY#wt9DcF1O2~|*GҴG|7&ՋcM 1 Fh~~Ey<\7x7e®FXvpߌp>)[\Nea yXiߌfäT1;3yo| gi&ӧǜ fp%0Myu9lկwOFzʢl!-ϳ׏:{O<:\5WX˧KTגLݾ+1P]$LeJqHy&erít^(Eg8Ip>tx߄]7^6~v\ @)RVN:JSUԪ4vZ9E5J'M)|[&3}Ҕ#$aDF (;(-b6:d <EXV$#d JW:y^X1C$ ؝$#]FK/%nmhp>iJ9*spG褬LU%<QrFqe3~ nS~LT>$HF8jNh*t6gt&sבX`7I.eNj5Ʌf3oZMK5mbΡ2y+ ~L)Wǚ2[cLD>3 XG(*o'/Hc餥 ϱ#cBb}!vg|Z6{3|`\tܼPK6RR@B:cj!}4JHadԜΝkr/z^[vR9M$JhFM]ДHP0#X aT1P`, K<_c)xkA(S:ƗTJ/tDiA;ouslmhdbְ[?'\{vDxϴvH**Cy^Njf ϑy}K+R)C({,JGD&oT-ZH\фDEN*!*arQFvDc 6g&mlx=ő9mw q~KX݋`O}PbF}p9q/F˗Z6i3xΊx!#Kodvf^Gӳ )沯큵sC1RAOۤiiqI!9"ED)Q)#,Jrkv]EB\ aiwr'%.hP*/㰚aP^ᚐWUu >c>{T?aG܅,M#~vwפkM!~קWC|vLЇrH&·^v+|O1f>p ߦ3UYNHfjjq :nOў}H3} 零_DlwQG$!HeP^Ee}v<ܦc?bk̛2v޽rVBnm: ccfW]&Bε7W()sX,]ct=/ztK2wi~so6bK.^UL;ԼVrxF[ݬ ڼ˘);ǵV֝yµVo0V]EEw5څȪcK&SD?IFsu'9U>YGX*@bW 1K>@bҨ>@7 l"5q6YR$x3ujAѢouc"yAb\e$g7RU{Fhc6%&8q;Pϼe_B,s͢XN.wl~涒r7/NPA;Y Y|ys148yȯ|8jt밚؁%uKOW/Шǎ ?֋ӯx 8N%@ w$`Rcɼ%F# 癠SxF9 ==|mȓ'2" |TR0I$[͠Dd`G$B )y O. 
?C-8ybx^kPA=١ā71_Ӝ&#f,2ddQafQF&gM~1̲]GT!C%VؗټlZ,R+a>epR=h ,Gʨ]u5Yqkޭj|7Ac&~[ 1zꅁ,\h:T8 n,JD=- 6wV]vGYw;U4  섿De_L,pAZ=~]]oXmoV2mYrW~8I|ȯoaIY{e6;]wnX78o-i~y[M!H'Ҹ>wbbrYc蘸cFʚ*(w7bLg'7f@9誖,HNSh:PRҜ@JAʩ" #Mc)U>=2 :+p ᝫݾk%-Ё;IDP"80:j&,&m2)I(ԯ*e@/f שw3ظMv^wga @k( OAhq_.W?HFZeZpkC-}TA$w>4?r7hvtn+򣈯Rx9=JRL$qF%X2iP:h3VsTm=71oo!/ͣoO7mA?6+9# l3iN`1ILW*m*rך뤕K>ړٺxZ}ަD(=LTDJ~_U廗դ''>#kJk*+*O<EHB zzvk%2A=`bRD-FbFkRKnmݔ搴yʏnȓu/YuBRߘPbҔ!8H%qHٻn#WyN,E e7`,??AdIl_z>-ՌF7V,#tKV:u/ZSf.xC!ܲcGbo$3YڔV+g?>enk;+Y;p7z v#R}Y-TR̍IRpIJD~Odr-%e2w)-ϾD-h!B7򌼚6TFU+fykUj>T>`NbdW̹%:Y2g a< W:4{R T5eTf3mJ"^I|\Wjel2j]O[ Z_8r|}@='4Iɧ@FGP\M2}kf.2¤nRe}I{K8s9xZ{ : UD=vWd7؅8Nj n1%x*Q/=nQ'{Es%^_mĉ/uӞBǥknBBW,HE6_hHw>s.ףm?\ϼmGyғg(yו|]wG/sXW{%? 7W O>#\qwx/[z)1w}WzP~L"C]WF+$r6ɟ3OdLc3Náć's.?NtF*tT%r:DOGUW/t$rFtЕe:R:])J!HWg]kσОRKzPJG銅o[븅Fkx^]w~kk-xP6$O:|^ϼһվzI۟K8w |7\O7?ڕS()EƟ{ݙ|t;Zok|Ƨ'|_r$|'W)uz4oӯ턯j[⾸L=(08|3,Oh@Ey"Cu~ưliyQW@?ql/<4_g|d2o>c?e9R^ ZGYkeMJFeQj%+[H$R"~$rh]?_w g1 >:>@{7?+ 1y7W\FSVkr#KHT]}7]|-j$S7ij0T{s GzWv#sG5>"Ãv"|m,BKL{s9^4Q蘌B1NQj pKDSGݘXZCʋqCcINV)IV@1KA̘}"D34 & .R챻)eVKy D4!cGjL##Ci9w(hWYk}@4pzjma0K~BN"OL4CkI^\cNa4߿}U%;R;V׸eui,rbi|'9L<77!fUf[1Q{+3xM^i` EJ $ uu>{*'B0Y; ,ܫT8: GѯkbBVwu mAk>KPR2@!fˈ)uT="gsS"`7&X% zUc w˥r->HUWȣtrZȥ:6:H #KG ^'_yaB.?w5^6&VW濇 T"rO170kx2hB[rNZ y*!-|kAۥ0EO @'qjI0Z+B8Fn-Jm ,(YweU\ڈl0#GƳ%4aJ`t9 bo&XvQلj8j PѾ#nBu(XLXWo=.x X\WHƪ)؁,"`6ph& dT :؆OGVhP *39a@-AŊsb @@JWчYdW LhDfH57]2{Am ±YoYtˑH'!AYTg ҲtkRܽPE }kk> A7"g#i0!w q b 0=Θ{/ZI!08J)Wt n,l2(A@wѦb8;E@(mEWú_Ʈ#]yD)Z6򅶎oc!>J 8g'!2a)y,-T{c<'yDwv Wي)@iXAQ=]Wit"B#N(ȿ1`bin8\w{^U?ozx0SXw VYRZ$L>|V@uPmp9rJ߷` ޕJl4BC5U D`^]d `n~"wu1,fWZgU ]Ad0ڣ7#w*Flp}Ӯ2cc6aפDu2=tT*h!{i Wpz q VW7x>yR-e,蓧ioa]oJY%֦O(׶(bGi3Rz\9!1xT,~x|*m\}DcqSBC2Omr #sJM@2z]TAjP]O}f23!&K@P=$i)]gnMP%k,/-Ԫx0 (s 0x@ B ou`c0h9ւQ3^F\Y>r črKFڵ0zGe(mL3=C_ |@<A4\YӮ1[Gm=ڊۥR" ,uT2h6@MoEH8$mB. +P5]~E覨OW"]2$ɹTz.zsyv(};)Dv;12mݒ_М~UvV@!8T;zמ,kugYoݴB+ߵB3 8nRO (s2' tE5'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sus:qQK>'4 gZN ќ@/ 9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@/ v@87hYN I @ dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@zND䜜@ R8'PyN}('J>7'KtkdN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'9 dN sȜ@2'qvi=ѥ?֥z9]o~~3L c9#2xq p|Khr8yR֥f\z ƥ$NgDWp6tpٜhK[-|.tN0z9t}9+,s-;uh=:])hXDRp~ts:Rυ6w()]@*J8fPQWxGt.thOTlJO+}q$)OէM_> e>1u>s^-᮷X /wtuu(~?391wHkmH !8vϗ=!A?%)PW5|EK#Q9Hzn&zD%~yMo>],59wdAiAuK>-y6(b4J*qns{)&L@I샀,ns1z ((16QsTvKJא '~Xg\C.CvO^dɮn,EQK/2Kj)` s2YD"Y ]CFW]TjI Շ>ZnNWR4[4nmCNhםkj5̐F'e=[BӢjz֜'x2b[d #/BגÈiy'1*0W "Jִ=H4m_^2Mȭir"BWPCiGWGHWR3i BV t(i73rtЪMPB5E]#]imku"\.BWV5>v(MGWHW+43hx{A+[jG/i?%%#+kmFW}U-thOWꎮz|szաg=Yd +[lGWOzM^{jƄw"6lmlQ,U)TWx1Jorrʌ+C\kFT6eb.1!t&,Ya'&5-فU NOޡ mZzzRj0O}1խOyܕ?NtO9iӜJJ.UEݗV(1鷛jYޟH.ge,Զ5Z8j9`{?|}Udf?ge[cBcWnrQ-Lu!}lLԶVU,V[[VҦ[J[Go1"oUgmZFKm,YDjڤru>slm7(l!Afnzr~+X?"PNf_$U!4!ZvQ?e7%t>`#ˇ~BT4V"~zm)|(ЍGLq#Un; xUχ6u'Tr<~u[65QB2xis>|w!n?$q#h;者Ҷm.6%kT썌\"NL tw(O9shLNj͘ ;P۳ |$=&Ots=ϧ⇟7PT>eO馪Sk4;g 7M-NWխGYh cazu/ ډ/}4@Sn&xϧӏu\ Ƀ⺿e8,Xʺag(|@>jP9\|GI]U ذY;ܾԃ\nKвUL:-zW`B Nٽuw\ﳜ"`=CFA~w,wL&p؁haSIBcҘsAs vla#*;:e|K[ 4I_|'aٿoi" 'IX3gDg'4ibZjڸE[~J[>*r=C#_{y!_|ze֩HTGHi0D{c}ق3g:ƉnJL\O+t$҈; ,Cs&A8S1OR8Ǘ{oe ceFز)㭇M/nA )xeU=׍\o,8] 5ٻ@VoY,Z"kS3~{%oHt^m n ޓWTO'9ha1{rHnxKQ31(GQfɽ.&%. 
ã1\J183sTLLZ&3xx4GKسM78t,7l S>cr+҉HF5O#|e㮉"換 w.l[n.a?PJl"͉n=pS4ԋ IJFscD-Y gΰX',UbZKkôPCX!xjЈ@5F@IZm/5@8Y3'\j\1$mTE=I%S#e ?fwZCcDjK&= -:sa4DO)P"+pLîS*Reh`j)XxַӚ,kO 뙵;i3x- nyP<ۚ[%o&[K|kG{"b8_J &%қ^ƹ(Gg ܄MPO'?)ʛb׳tO?{5B_rLtj5!@I}LUb6X]RBpr߭q_Ӥ䛦MR$MWtJwES~*wKfyTJU24л``/giћ~V ػS| <.mgipc y|:5[`g|Ȯ'H`؋#7-¬pH?TW &),62m }ᮐ`dEp0F '?#38ء2]u\0wOc[oINwdb $:mpqg7^ÇVD,+ՇMg8uhp=sPQ=Qw뿶Ev8/%#Z 8_5o[McoK ԀH.Cu /}i1o٫'n" B%9(Qc\TWC+.0w2w ;Ksl/9s_gUa \r LNqO ;Tk݄ /'(a{iև݌.wq|OJ h;iMîJ;<@ue`"߂a\?{FeJ!fG`3ۻؙF Ց%E8,=,9zEr\;Uu{x.y `YoUb(כXr'y\@}h0*UqPM]^*qELxUU:{e]ٻzqd7v-}ù^fkضg+XyyOf!gW}0&)8ej=gV0E+ a?9cm}XD1B#JGiED).+r:!hrA -fBLە?#Ϡ fZkZmگn_`w&@X)S0$"d&9I IDH$$z$:-sc{]}is$[mp)5ݻbvc6n;qcЮԞ#֓elBHjA?ՎxY+(wA-:b^'yL{r%˳LI"U,PP Shd8XN!YC "Q <+.d Ds_P"zaiJh 4dyb t-]槔8 "w4B[]sS*XdrpS褬LU%Q93un|8 GS &A2™H 'Z[B4VXNr6 rʹH,Ny FK@Mr!gI]o^">1 W@e\ @fhjks1&PQ Sb2! :U\E 1i QA;kŔ5..xlv0c=_AyOT)WSHj iXdGH;-lQ:7`:Fm$KH1DE2Q%:  p"KeQb6FryoDN'.yg+Y89p?s'dmI(}O?' $=:T h?zPbhPhj!IY0 -˕)B H6D %ͿAD[(<*f\rcRhVbW>&cȟ}oÆM[R'GYe%%Rm㛦DŖ ;o Omr~oOONS{o/(*?Qᄗ,d| .zCH`F1਴I#d (2*@p08e=1`zI8AQKJp A42׻b\b!/“bRľn6.'70OAu7~-L+}Pa2$bB}"!*DɊ2weC60½ge `:'LNh lBL$hS8bb͎6Pc;G F9|>uf)GyJ`$$[fmQ-v̇|s˝"ȸ;-MM4mrթccu.hq|x5Xe3,uCkAKFBX"gQ -u:K m?O9;ʪ= Qc(gI)8N1>:ASJWNv0|qh(iE4v|Mvч23խϖMxmSbHchMb1 ÌT#hI&4R2YP\*IT'A<t.IKZd:RR!D !(㣉sˁ[ww3g۷+ 6`ؿ]]9d` -!Fj#)*cHmNn#! jhC2ȓ <߯*u 89k8h+ %495!tN)䪮$$f3Vl7ʺN-qdhPDJ[v=_BK%@<+]w!.UxH2&gvr tF,YaiR(3񣲞zOk//~eO7~oos~|oƋQuzysqnA?M16U6Yׯxx9Z?/|* ng򳼢QBj;Ɏ3կz jտ\W [lWH 2O \ ԭ0W,m`rǼ<Ī? 8[72jWe,ecn{|7$ +~[1jp2D 8‡z^MXJ%"z:[l*O{4O];륯pP͖ ǝA@Qf|[,\w_W^oX[5UYml{9+L㐗$V.7 pa5^C=܏nآ74_.0e{:z =X^V h AER~a"_k7Y2sz1Izp2FxZj7ӥJ࠼SNn-90% \ʶl41vD{D]fC-T12ߔtzZk_wt⟷z1o/(Ԣ͏<8,?wž@}~w EN{[^9.&hܻfz"kA![(a8X }laGqlgҒeya G?1rI],#BS"_B|7̻+o07)ك26/=EĪb͋ʏn˄CKͽVD\~/~2O~?i9In,3Kc-ARֳf^g&&Y`YЄL~> & 7] vx rX^.vޒ 䃩'z]<|MC4Yw~ir/`wY{r<5~Aןf;J{Nv$/*r|{4^!.9_L͋nh)t|s,%S :D׆T[QuYN̶%v[T7.G1JʌYjPTE8$MNQŽT@LQ2 xy:(4P.;&Lůw ~{-/{{Ś; ig@G !fTt!I*(S*5]8I; Zw3K Kɑ[Y_*yGyG8j|y2^r·#Dc,mTi8B\&שdDq\"}lH% `J yU2ۣT|t*ԏfkA3K[IzI;:H@!)y_{>d4]* 7Yo,^^Pg&>6g"ׇ\n/ü*,X\- ;|uoFxh#, ~i;E˄F:֖ =g7nߧNq'n/.>\{s8_ca{W>{1S]Ai9%}}<Ξ.GFb1Qp{r! y+6o0kdmeo˧ҷO;ߖ?'jl\+>^׫W;V_'ɾaқgKX?qp>YcI+_0g?ظƤ צ_w_mۓ[{՛C0@E10*<7<7 F<7tv繡V`"UR]JF%y.[`:ӵ-z RCb)-/}dg wV珗 r9pЁ#=^(G2Ph{(η#@AuBr%V#W˼ȕZ\ oV(WA[5yW &T#Wt-r%]ҩ&W(W9z'AF@5ޕ:3wJ&WE@Δ_+*8`PAx@Z.WBZV)9 ++\]15bZggA7p0}"0`&a~_DS( +hruhӃYnB%RbVL(MC픧djdZpEv)=6>Emo֢w.6{n͑j٧p L9jA/EƷ4Ammb}jo./lbyNsԚe7D<9p-Ҟ 1]|+ AU 2e u, lrve4:\9RT% B5r%j+jo0J&W'(W ةzi!#ɕ:5wʹ(r j+T\ 7 }\˕P6~rED&Դـjj[M0(8{J^\=^?5q|ep.N,WhDyW(Yҽ WMv{3Eh-'soԀr`1{&t [FK oA&Th&[;VU 0 u8BK~0Sj3|ΰQkH8 .Zi]ʕP5:Ab(_\ 0A5reA/hFzr%5:AP 2FWZJhJ(6:ArdkZu6Y\ .ZJhJF)ʕ`f ag]pEf?wŔM\ɻbL F>MN(jrurEZ{W A>aaS&WF̞MoΔ쨧jpa8q"0Z?юaafޕ WաM:(ɕzJpbZP4wʹ]5:\yDggscH9<ݹgj@Cމ? Znmǜ-f4rYM3 Z5ۺX5g=E("R8kWheJT-F?E D#9\ .ZJh\ %&W'(W8E5yW HȕjvQje\\ jZa`:f m'\)5a+'CT$W|j,\j+L9C.\Eu8烀gJpZs+V&WG`S\ ǻPTNujJRs QiqnXr%j+ճJ\{6=Sh M?w5N>w5 O %3\ Ĺm6:=*W\ uȕ""WB;0MNQ<{WOBsMFkڙ{;}5 *y_eX*V6xРV6vUsbT &vZ3,7gaSX\ ȕࢯE˕P:TM+#/kIZ7{J߼S+Ώ"`gqƻZ={J^\Mg*8Odބ0 &ޑ>LuA0ʹ8@աM"|7!Zh7}fxwnBPn( fsAˠ\Ђ{婥! Zje2T+4 7@-NNu JP9'r+|v.a-rŴfr%6:A2=wr%TwŸNU3(0{J(56:A Kǻ\oj+0wb&W#WUMޕzJpE)5wʭ&WXGxz,|xTYnRJJ^a#m(>uo\Bv~,ب熋ƠcX[ֽAWݗOmήg?ȪW?;#^^^w.7Vϗop7ry{w}Ʋy;C!.qA5򊽧~Hӧe&eo_w3dly᯷o#J6h }LY'p3*HAw! ? 'n/,IϬa{{D{S\~vSx'bpC],/rFRdKgnUFg%X@BF Vs!'O:d!2%\;cT9vOĔѽ3ޫbw&gal@m}iq]֗^e6E暀l,r};:*O-WIk)1i})PTH̭dR1-|D|n'-Wm0D5ݻWRGr6 #Ğo XꉸOi6Bvȝ5cƤN}NYT w)F|N^{ l̖^4ϒwg6R2x.c/YFXmؚtfP:jHbQ)ޅ ͽ M}|QSI7cǓc+d!@x"{h7+YbC%;Ś[ybх,uz}g},WuUIu:.#*|p.#hn!:38oOA ^̈KQEn"棊1͋BI1M`ǀY;Ovߙxa(ehYsHdďw*ՂYeUw ڦL[oT^MJ)h+JUe ,äc*Jr z_fT(39՜ HDNW2^i C.k/AXżۆjbPb$SaS3G. 
~}5ɩ^9h˴s9~ݶtAA#Hg l.~aMLR[%c8wI~J!EXR75J*|?w<s2{vhkՁܲy|8?/.tr]~NǫCu3rx/oCpsٍY#YnDw^7o_kҟKwzxWLYm!7FZY,3uī@r HҕO:mJet5f*}0%H$Q,H\Ӕ+iccLTIJ ghW .ICcD룩Ԯ~(y\}uoi{O^<' 8±w _*VPUT[)sHQqi}PPj&]ۨ)N55nLy .E'we8R̔ $"*NB (ӄ'w \F9]n2?Mg[\}`N?D޵ߜY[|ϧ͗si>kW7ٽz~}%{rX*;r?{h}'k ́:\ɍWS+Jkҕ)U[٠Ḭ\2+Nr6TJ2BBM =қ qJ09k Qƅ 3P I:%m=~,h,qYRCGK.9ED0N`4$J47E!|.>PrRrRny"~֫澳O̻G!;8Np 5=&",HHDǏwj3r_To:mZlz߱.Vm3ݠ"p1VX *cxiX );FF=!B ]Hkvu@xydIG*s6"/JI9^yv*5.x(tQv?( ],EFeg2oeT^FH- c+/hNtEXF=U!iIݧˊ nyJѽL6bKV%|%y^k]pr6;4Jxy*o{.r%F@z!{zb-_W*ٞ\ѹfZ4 VV]X*-6Wr(t 6@%J}50u"|> k0Q(!u8okVP."7y8Vj˻<{s]^gYrLyO1By"[ԈԽoݨ5@j ʧ.%f Z@+u$j~^Z(`tb׆Ӻ8ձ" Eka i6&#Ҵ E ڮ0θҪ  4h/pO7kEaɽz}[㶳 їw{ob, а<)uE7~ hzxHV:A! s C%+?k[Ɛ1"WP~I:,!0޿Fw~}seN:9;^_/0cmH6c>{EE\ea.jchI&(Qu1ėڅFh@Ew722\&V*! .(w;T[X FFLtE ܘn7gNgQ1UVut"Ĉ&z_5^hI. \KO!PKQ.[8f}!`FJ*LSZmcmPf#pw7`;fnkTZFa@G(6x\%ҭg<>Z@ y!׹>bXi-E*Wŗ[9۵w#ZJ$d/i_ا.Tޗ@*tYT \ %sI%l8G}@DJhzҌ5q?e׹kloxV'_*+BdRcSiKwOUtN 40+YjˠQT gK>N<%x ӝ():T~Unkv^pW(&E_܌kWvNWkp:_maЖ9^qEx\JqINj7MOiI3ʹ=K1a9] 81#ǚdb1}SC=|.JfaO~yo)2u#:=*QRQ9F=$yyf?&}%=PUV_oλdK \c/^rh{ø['QFhăIf$d qh!%*>V[^-dҥȔZj|ۖ96rPB2bsrKG 30ߝl!2u3:?jp7 $kq_Ж9$#B0z r|XC8i1> UnԔpzkCmcC=|W;`e'G%nzZ"1XɅo_j?DRk7q:aOk{j FE.7_tiTб"'o`7^P]Օ}K ~EF3XM}_nfLj֨{Lo/'&]^gOwؘ׋E<| B`kׄq=K#'&w6{)48I48If`S' Zy31NYke?P$F% gsOIKc׉:Icfh΁0<`hЏyg9yg9kY64ЂS)dn&7es$ ژ`47 uN uZKflln=8?ؿlβ;i6XeӉ2Ke$8%:DKP9ixJDABRS+c@(cNԉN7+"e/w./@\@݊pz-S$+]k'Jf)^dIJv5:QR+Qr9Ek|%JȗIYΤ4Ip捥CB([Ĕ2M4`˒pb TG-wd h71ɏ~O.Z 3ds]QbP| -:˝񧴪Ly[iƶ;~7|B ^ oL*Զ7jqØ.VoL!dJ-ʫ]n?O],S__!fO6s^!E9m'Qޫ[SofO=2 _3)*=BW yNy¨ Z!Eͭl& DBIXQqSJVYђꛐZS*s#n@{*5":5&d9M :OcՂ--44  v̦hXLo6Eމ1^M$5}*)'D)[,SZWgs(GG3 vfL3ᙶ*q24K^2GS׀d8/l/aHc isO>fhPrC H|?;x`ze,M\_F.zXq?{}pʇsxGms }sxϨ4Q1ǒRuYj3Ub.hsz+꓅~c|vRImp:F[c&ȥi j~uM@:(Y[;7XZx׫[$)xvXu2J}aEu 'L6KQo8)졨-a)0/xJ x~,PAH 5,4Z";xǟ3MkөqYAXb< {̽E!L啯A|Ɛ2q nԽ?y&# 0$"l$!GTưCF.`QR^>lSMP<;Ď i[b0z .RmƳ x>t`H- ֶ/1n;,V|P O.FF =z쓋 qH*okYٜӿW;_;^i7SEACELߣw`[] RD;h]2mkMnMHZ2%[ڭdxFVѩ*휧L՞hvkBB޸nN)|Οnfxt|\QA@PsT1g Gӆb ' ՎQ{VVsXI]%@*TsDQL#XA38S'(R'*-(bIk" CiAKV T;t3P"bţvwL>ǐe@T R H$R+a"P[iJU^5ѽχ//t hV5A4٦3h'dLFn;0KU$O 7=f 6he7t5W38>Gf,To2eɕ]0b+}=|6n߂K~6$b"wJjuXm^u7!;v(g㐙r[dJ:kx1Z| =ܽĚ< 2@ܕ6X d;t>-N>~Ea$)3)#:mDbPMʽL"ւ"i3fB6#K X DMG"ziJvTn'Ol '9?3FɃ p0I ,|z녿pJ>>܍Mb[Jm͖U^-SMΒuXRk%NAW ?~R lWL)&}9ᚶ](]L ޹avZ }x[0@ǓyYD3X*X8&g'׿;r0277B6e5i,Y:ugYK s&cYN-,,5,e|~oXiܣ-Zl%~=$;eݱ>W ŐWn2KAi[]\쎋ڑ^9i[TTVaW}$*l{%Ɲh;; F--qNF_E14A eRKpJ9ؓx f>hcy0F10 L99F2i T>\рI3uVZ|HHHt5\0T!9$"DdMGak2mib%EC+<;i7WP1P"f@,q,G=P1 3<dS<E-[gzk"!)zYӁHP#Y(-A*/Qt\Zy-XZB9(VV\JnaZ ގ &HK¶L!FxܛOӸKE*t`JY+p~)T2Y:y f%E>.i->W!Jwj/VD[2 7ǿ`cBhx1T5Y{qgO)|Xq/a|xƊS˜poYK9HPieb;Ky oA`a(V1+ 6èv]سDjVccM88gUͪHTgٞ$fư]xq{휁{h8#%N ;-,hB5Rh 緍v۠%^.hvALR{>#l zB}MUk_Gttmjpg;'ukc7pu.!uL4R1B@i!jG#6SP;W?kW*уuU TH` ;,aCZøEOؗ8RkȵՊɳiD950v q˵@KrbAYKYb !| ژXD vK1B\Bd*UϨ ĪMHe*ID)knVRH) WbO;T=h;M,J{*l< %NrZUzAK`2/r9ﮅyMdv6^:>KЧ RZup'G<,h*G5]زA_vί/8*7 4té谪fA[[>oѺkm],NFvu NhPt}{_g_wvĨ#T %T$ÂE,ݗ>FqV(}A+a%?BkU騁9:"} i݇yLdRa=k}v*jJW'}ѣ_nQʣHl_]{(]9hO>v[߰ g\#Yw-4UK uZ;hF(aAgJ9?=A<}u }s m^"޷B $6xcN^Gq_?S4nW}v7viUyU-VjeqnuoA-w9 q 1}y SYRrhRdj*8 QAd:pgv\(N1&Zg0c[og: T@rhrlӛ1ojm0!'0"j ҈\r$!"waWrBLJbb=vZQ֢3&Õ'*Ar%4*tT£Zb%5t_Z %&H02#浨A[ c9) iy$9Bù!Hrr湦r F*7C b>dOхہQ\ߛ/"{ctF剸/nh<|f} DCW?=ߧ|)(RrabY|gAg4wR*KIÓ8}c.1\i% CMLgf⌿ dKnf JMǴ:nRfWi*/ lLf¤&  所ݳ҅BG/ZXN!ѯ?I,:]$F3},+\0 AvwT>>&h*/g<(416Q-ݿ({pcq,E372&s-7$(|rB<31X٬;aݛBB"(m^뒒 Zך=5T(D嚸!3 d+f 3mR'`YR^\iѭC[~#n))MB?\m.$(zfХ\bgr~&'i971%ԥ}mk,1Û’4@x݇}Y 0k^O5s%oҔzb[e$a%F^ZF͢+Z+'M6ë&<"筆,m9xSB6%;;*sYCeұI>'4#gs˾俯2 e{҂X|vwH}wxy?^qۮ.Џz%KSPrաpĤob5WmexxY\;nGK.ٚFj.:!1q$8>6JD_gyZ1n]P˹u/r^2AyvUGy%$翚pE;W׭ RPGfOAPLK gJMWITa0)`1EHhh& @֓ѶU@ Z1 s se 쐁 P@rͭ ;z_U [*tXn֚IXhY C_%zAL9 Hg;+:ʕPPs)? 
!`dՀŲl>*!aGL:<3%L ;$9J;ՈZgb,=`^FVsl;A (ޡ^q(QG=Dcf]sDZWP%>XD\T/IZ 1x<r=kX]$!,lϯ{n/\:`>:"82fHJ$s7$Pnɨ `xm̼~ E*6ǯC5}ECYb`_ gd ߑ8,=|4F} G!;U$@**ܭ,&)!xz-IFw%| ި(PI*HVb%*I)l'>*z$ي!1 TLH!R"KAȴp`I&$Y48 :PP0bdqϰJ[z@+IYzENX㎗o"Jj|U]#feYXXOx,~#)h AF^A[5& >|iĢ)ծ*=An7v0 0vlGc cGAi0N*G),傩b80cQVe%ǶjL HJB|[غw;YNտiDp!w4Sr +P<0}zέ#5{̎p2krwq~޸li^s8}X\!fBUHWt{smNӺofGЫ!Hwc!v;\<养]kMnmH+F2UELy "vL Mڛ @ʺi:Vkx y"Zj<6?#;Q=n;fw1-${KE`Xp2@gxgZKUG[5[,%y㗛~u$ЦwMM3B:ԕ(l=ƯC\Io^b D?YmD ,LkYYey1,:d]BΓLf I_2qn t}wWijOdL\Rrl5L&J&N矦H7A5S`jn=IjfPv]}$v VM|QkuZkb[M&Ts7֮1qݩSvޙjB'5襂lx4;\\T(Ui>c)#T3I|R]YL]9n cV89uhF]^pӑhɍs j0%0"DX8^U]6sӸ֓&Y m0r3/&D^rf[mE~r{jj2ZS@}FѻpvZ߄gi<n,k'MxyDi0 D-~9g б pQs'!T\5BF๵ڠ! $C# ǜL"=SbΩJ M̰m*-i4 'SD*l lN#ຕ*8Ipr3dp3 8yaTIדD]O؀ K?jYr֤4FZl$ۤ6bxUMʱe=q4cW(:H Џ=f}fN/ppـ HMYTb{d&?9$6B1n-;U5\hq<Ԅ)xl'y ,)\=Ji%r’BCو%=d)Tp(*K ۛ{T\=}h[ҎgŊ&L$"g(gY1&^hx6VVL'쓵u2ew%:{4kQp|-*zobMf|n :ygcJDقyb}kY4 .zqN)539ߝ`ӻ&'༸qքLV~Y̟$į4 VhQˍ;ء(.7Nb2w0f_,|6e=f<9I`g>L+r>(Nk+0f:T?atg|K~5sAm^ЅO?S1iljؗi8 ޱ%ҾDX2d\be~< JyӜγTBhFZk5sY9RA_\0WN[ lys{G%}$K1%ٞ0t`#pi$&*>ŞnsOmr̯ Gg?oDu(c: HζTHg_K) 7VԨ\?.Ρ}lG|7\Sw//nÈ4!Va\\pWw'6Р  Jw%Y%Mu˻I>5US54^r Yb Bڅ#_gtX|ՕtB O7Q槸o4ݪ|XWiޕzy's .+@kcZ@w맜ڰ]l%zA9龺)(ZBKړ22'C+왴(P^m̋}s7OWq/64oU1{v) . @~xq)$y[[ 3QYޒyռ[?7Wf8~G,`0E~(Ż?ws`x>f{`=Ajdsk5Y͵f +%{Y2a<PIRQIrojV"}S,[QL֙bJz[꣔o@0d>RX#|[G3v%dHgRPzk[VOQ#QdQ#T:/Q? $opF@&U&y`|XK*!a|h) ,aoz_.@h Q}05^{mǛb?I*(p'I۹IJH䕦}*A[e3c4!d"A.,MF}& #D>aSφLe|ZZϔxcmOUe&z MG%* 2prUNwqqۓg & 9x 6T>m]+S~#dl: vc8 1\}vy6&LU1bN1i_ʥv*ש,RQ5^!m^P]]qy-*&{LU%DvVKIbլeVM`k%ypEU 6Rt~~qp^UzR|z S$m{bj1tqڴ댨SdcӕӝTӍcZcz2/UBy2')s,>|:>.Hz:n׭Xid]hD>}J8WF-[W[)ĴGJ 3DOHM }&` k#+),%!a~a ޷3 pޏJﳙOп_ſbw Uy|h`2o; [ï =puQ\|7e!h>-!oBA4 FR-A$!g!Gq13Q_mAyC,Pٛ~ ~N?wdHt,A6}w${yքx/ʈr päS&P!"3b% MF+pIЌHGhKaT 3l8 㒁$Q( 0Iv$(/,xI|(cR6LKB[Ѵ57aaBdENjFJ@*;2bJKc-A֥n6U TK-C+U! ۈ-y K+VGK/xxxx]dbїL(x 6SyEZHk00ZpB(Ɖd_M^(r@XlJWnT.V Lqv[*Dq^;q2Fڠ(&f`)!fs RJx&k0 f u!2kL ^ 3!fMWf3̊uqF0m7g܀v]~7P\`cP- @X*Ŋ{" ;A$hR!"XE$,V&AK ôX޳+iyTsTNK6Y~HCQP%AzV0Vɱ|bDiPE K#99{{ ~c!' ;rF.,*t1Spf25pͰ6+_ԮJ#cp D9 fN1UD0dxFh2ԱK۹s9H%HA"1 @%؀{xyξԌǕTKAx&HN7kr?_E;=^ϷTopNxft =k:pzހ}gd ! 3ґF#E-?xR<4eeS8]3R 6ޭOؿQnf WӛU頋ki9Y|^vRJKOYOBOS>VގJvנYczN}:ҹNTn(3v\Z3JչBڎt/; DdBTiReUR+mAv bF6ǑdDhv^҂ ƴ^]ᴘDӻ{Dt)? ,fQ5,>r1)5>pMڧd^lF\ حjL G*T9)+w$J$FiD`bRrk0`N,W9(e&L|# oקpIVo 5 "'TQM adl9( 4G$7H6@׉hOsdi %$N\腶 ţwj-ˑ6\8Ÿ0 ;ѱ;?Tw"{#]0-qPp­U@YVծ15/q`k%0L[/ܗ{yh'(T=GlvFfph'H(Sx12!,~hh^>Eke$'-%f!Qh  p7$kT\uN]YgR?g\EgsUT'x E׋vT+cv1N#.yC+NNvNDq[Er2_ђFϸvL=9Eo?憯Nqo9i(DE+Avu\u۷&3ZIxTo-B  I͔FuC"]MA"GPPdO0`椼 _[@ SPAH ocR*)!(buDhJyŃ}K7@R: \k./^idAVsY譾TO8=o]}杽/4/ϧ3kOfoq|'; jP%ba5_+!΢a/h\]sAŜE7w`bN֥~]S~% N4ۮc%{?lM{?GoFM׬xy0ʡ_]|g-2#c_)_l_j LNk]ڛ!cy79N^E߮n+An<>Qro}̃˻Qc,nu!<\ś#Η޵nR9Z8!/_o \ >;sywVk3I|YĞ$.K2UX'5:ʨvXfD}UY~D9ۗOx6rx^oؽ6E^OHW C.][205|@%Q&Ҟp<ڡ0 PةJ8h]`5_YKҩ=D*I\wܹ^:/i6[éGL/0G0;o}ĥT<1w- tyR]a}HUmT9|ϫECc{ 8! ϕ 6w+™Q * 0ҜZՔXEQ)P `@$f$񾾬x;Ƃ.WjR̪n*dUwB>+s(Qr`=x.|,+#.7ť18'D fok.V,@:s|I()3%#U0I6)-3 douSG.<PjI{c TUy,D黺o'AX7~LT7GBIFM%Kzq !\?`CT&^ز*w#ǫuG9U[ZWhmKf3=0k55zcΎAqkgv+ *E2O7rX+u݈. 
P3 Y:m:ƗJ:P3sxLg=7rkc1ڱaya~C1?U/!~k h,CApyQAEe<~AAo)۪:Іo[&hQr~Ig- Ԋvj{laHrz駳/wzXnBLX,*C Tҩ03 1n-A.-<)%MeP=5QE(}*z#pQ/T1³FFCJU$iZS'Q@e0XxMgNBJ+ G ;gôTP4Dk[/!N)<DZdQW[ d҅҆(`b {xϹٲKdizCn6{9s{ҥنґjS6^Pburhn"(k/)^~I itُ2Sv;S"ߍ$ʎeEвiDaP#%"R٩=yJ]k'dL&_x+*$D]`ߐJEJgEC˝`W} $Ӧrg_D([<;mZAC/k9=v'wpܻHT &^zDn.;靃nu[<: <>CUQ*12%me@i̭xUO5BU%3tt&iej"ڊJh+WI%D6ˮ$}[ xcB2\Ϧ {4V\C*RX)0Ȼ Kaô(9HAжNHMӽl{韧AΛj‚F9I.N~͵ڧw =t΢}%WMcq9'mw$QL+l[idLjdtj]zM Mzњ8pj:v"˹ p|҉ N D]@c%-v˻ɷj94R2_{Hā|8ہI3T𞻍/%ym<]-~EIۿ/0^]v䔆JO]<3؀m` *B$*Jy5{C|blF r3i1P1{'qP}p?5_ZI%%UFKQ P2Jc_xxp  Ka CY~0={.T<x7u-9R X%eFV A 6 Y5-;\06kp{}F.qo6w_6\\;ijs^[=E.PqAd/ǽGApC#<8d˺Ah҉ND8A{B\W}WV ! mRsl>^.fzb$C;F.J"Şp<=˦2ʙ{7כ[4N7P@>w3%@Wyf^rIBS! W3Ao{t :l3NzUAO|'*Lj؉݉ߘ(~< 2 0ObrJJI\f@VyÜҌ8tawе KfA\`EZ2S.ne9hy T$2WPHR9 c<9d[iB9`cow,؏$ϸ'Pc7-pII)Ba3֑v\0mgex9'z}964azl<=cZ)r $\4vR05U ?<|bq!G}z. 3(vo3"W|I͟G'ӫ޳m,W(ι0}?ۋn4\.mŲ$KdQQlh,J$wgg=;Z SLg6k%:HVYqV`[r#rxN;WҌl/}c5IJ)T3AyRI|]76ns|=G:Ş{/'+qcZ?ih?)`1U9 hx:,C-&0ESgHק`o?ߣ?~xrX,`6x/VQǬAD:u=9$8HyIRR"ZULzm3%1[+s0yL5eAVK@ھd؉Qb$*9e H*MF i]1&qPF3Hy 5$< 9,HGH13ɛ({9o#˳ +˳IR I.)f#׳txZcG:qĜd$6IFZG{XHZEVVC1!G[uY[f,L&^6cj#qUL RcMta/+h(^{^ $zDޗp}נKT0XօU$J<*e Dޝ'G!Diz#1R8@öT:HjhZcmv}p[,fܲYS/= UuR; pzTݒ NnN24I%*TE<UMqՃ{@?R(Bw} p`C.Ccrm7ma vv>)a!~IɔG0Pd`b)$&y߳1('  e;lqs;G09bsXEiĠQOs6Ka9~v3RċHfH& + Oj xc =cAw? `6e6^+)3w7os֡ i󤽾йa C UqVT *J Yc74(t  C03^glj NtNHƅҫ57 6ݫ;"0& _xu+c#\ 什DE&@w!+ /|3 m 5NN aaFPufqc&߯Dy JB4*[ E,@D~iVK e8@FG1!%}%Xrkh>\Ȗ[$Fzs(\Gp vn@F4acD]S2+" .bH)DYLsodC,]a k6cTɟOQX:0u4b)iG4!HVm1H.k\ eSGkqr'6lbU`P#v*3vš]"Ol6bBPBAA 6opUUww*2RZv*3MJZ)iSEd,QzNg]M$RF&Ԏ(&Ȁ6c ?cM\k1:@}҈<x%R?`HZ$L+6fWF|moT882}aDXoq!K/#}1/!a:8 L(Ǒ,{X ˬq^Da&u jblclGs)` D!*?8uZ# 8B>Ptߩc=S 1vQ;a"<ԓn#Sqm'w&El2j!^N6a #$z iRx&ȨyrIK/9[50-WC4gMHg&ﰭ3\|d;l ԡ˼Kbjp͢døLJ 'ԪJ˫ LRÉ̙1'VXhS_qHZ3HH0$NK;1f> |,D&Jelu˜Lܘ&`}H gP*KRx$K+:Pd7Z2CӞۇNH$:Dߔ_֨6j3E>S=Zœh,8'Z ¸Z,&6*&Y9˱5H{YT:P%=$.lՌu;H['~dyoOw#Vai_^_Ĺ?ȩw|fmaς&1<2)}hPB& ҖqhR`;N|1 KlJN_rT+ΜM" E1 ,2I2p=|ճ_^v:==0ЋǝoN~7wye;c|{ѝ,~{Onݺ u"un8۽0/`1 %}츅~裿ʍ/(w0{{|[ԙ ;JS@$gY="yO«WqМf c_? aq8}ݍ~w53}4"1?8}@L%k 'g2j_g+kSk]j/me׹'B⮟NETN]zGP@>_yB;ꀹ_|q M~_>K@?iǿ^>x=uA:uɔ_vW^PC՛@W`s:_ oOܔ>7wM7{o0LW`˜?/tjVA!TH.u`"H:ա`yoϻ!\͔Ϊ{/ޛ-\gZ2jB}o6fT~;)_+w|EN"xC1I:Qʕ& $n)F:}C$F6XՁ*`Uߜ j.h쩛[X  X[pCQJ"M4@uqB7i \'p[#[c.jjڞU:uSkr|!!ȇ|a'Ggj.J揨ժGԛ4Mp ŔpSSJ4}t1أ K6N:zthzP5M;\-SUsݳ@ߡw_5KfZ>Ze; |<O=G|zWhs_μ|X1D: ^}Gu.Ykq,.7u-9teKFrе=;~Çcٝ^NW OyQ0%0cPjKYR%VIS"r~e?3>}v83>xwwvn2^ߦߦ84!6ʩDzj&6:rG@q%5.]G2kM]Gc5땥:yUJ5ؿ)z{SX} >I]-7:_ljkȦlf5dLl Wp' HCOI1BvI*k|f,\(y1i_ 7q% p>4qO8DžHi1 <\.glQޣ=4sj' p]rֽDi2e|#} bSh#"e,Nv _^BY-]|*  ,y8퍍Iw@uj((6,brcɥF*Z)HW'@Fc 6ȌJ}dK>~RBntk);fv ߱AxaazxdtcHMBMjNhd1H1BCZ%oD{؎ hDDMt"hdY|)T4s.Uw"AvL{aIJ&.(J'8dF-SQM53@ Asc-i2cqP\tɃk 7QQp`NMRR0UB m&!XD=H@lF|;d8Ѻx{Y@x0j݀mF4ջ :?dp];j H\eOn*l&y]CS7¡#%[V+ +/o+~k62_J/lew|t_ l~xbS93}6J7龬TK Lm5ee++>#[rU$>W~#lޤ|MJ1jNkFaOkAڅsb^ZZ[R{A)nRv]yrbST?V_o.}[)m3q=Y%p܊;wV4on {藞em~ER9܍_[2TQWVIMk֮ErzTb/cO圖E,->zR>9bZL9zspɿ׹.u{?^ pٵW+Y&ǃ kwV9\Yc',ɜA;.ƴ7FfnqȽLySbyra먗 sㄳ ;q]HÙF0'^4\ 3^8ź{Sj`1=㜓Ou.;7aʚ駦ڍce͕.ϯd;I'!T'8@B3E(K6^Rh썑#z1W\Oj+.抋bA\t%=nmQ/;Mqf;X[~Sg[]\8  'ƼpSadYIhSݕQہ+{.%JcDr t31BaZ2%9jD 0_֮:pƊmOPD{-NKyW~߾~%YaZLQ>NrWv߶4_BTMhZXPj%g0V(-kɿ]P "43ANl0;q3RX[;S,'iÙZ7{wO.av?˾fu]Gnu2c{en]f> `dV0!!R/ay*U c)d;,~S!\GLN,>rڗJva໰bow/y')&ɕXĿJdv/Hv"Ŭ_ԿhAi +JϐՏMH<8A/.ka-C4r۞y zBᰞP8'WO(9 2&KX L+Ģw2/-BA˒%Gdp>y%&>h}xwxnz+Z5|_E-  XUqG@~{(^ULhBl7HJ{٫扫·˂X|?$j$LrNXT0yb! A EJ[K=()VWޢ~^%vg{w.˒)qe7#dL)D6)8 qFj֍#E/j|1c<3Jud"`SɉY\Eud%1j`5C=bQV6͜FuOTV8~qj|)ޡN5eX ,FXΤ<-!R9l$CKrp[+ qQ,,9j,V3L܃%H_هrQ+ӟw VHF7gHxB8(RvqisKfuK@YBtp5rp ϔ1Yi\uj#"H-Y"O! 
KWMՋ"9_+o|ugl@C,>!^|~^->kL~V+޾wmL B/zEߘ} z I3뗔^-G~vˊ.‡EZ  -N.NuUl(jv6#5]LU{^v=59-{clIFkߩ]7ŌZjnk[݈ۺ#*ׄ>$!^WYSa!ޒo<Ě8OYNUPF Vcy^)*Z%oĥ*L";HC=|Qld,K b5s3F8ssg/8S9gKsY[qCmKF 'E4Ν!CSNKnc?7Q$ ?=)ȄE,nޞkɩmN]Lhd6&T'obĎJkcEa"zdlLjoh|dN3K9ա;a=|UnmZ'G<*T7SIxjP~A7<|'`66.VTC&>8XՊiH%-wEcޟ(40M,.H*@mla="%S#Ҡ& WNZ@TZ2B#BȂ@!F0 BNʾB&Sg'\Q&}UAf^M9 ,w'bIG) uvi"71B[c'0*ni+K!Zi%/: jIQ|dД$i#KN?Uěio.>' d1iПu$#q( 7<^ZpP`*\59ўX;$)(Q22䂢2Y?Ii"m@O}v11 z?';4%Ÿ֓Q0F)Gۂ4,Q:j5$A(݉'H恵(9"DĹW;v9/V+=|ghA7]0ܹ uB!]0h\1vL;ˆHJ|I8ėtdJL^v<D.LSƥ@ 8HLspҕthEڽ7bv{#-a`=<KE#o GЗ3!rv7$8]G2a| F|$ a@i=v6$Ő $_] Ո$R8W~n@J%Qkiv@egёF;1H?DGJ`ǹÐ7N-tXXyː1z&)¹@"Ym+9| H}mǶ(:bcޝ&D۵g lʭR'vg4W%._XQ/z6rT xxajV2dz5\bJ@|I$e,Eb ,肉Z,frREm4 b.%㚄AmwUŐ))tYD <ef6RMd^:УsehSzѰ%`ZvJONtWNHIY'( w>-yvHS{n]X"CR|m3%q>)޲3\͸^K+nƉ2=CgIgAISg#r0]a'o"#r}"-jdөIGnb-iOZ̻RVԒF 7:u"q"BKP = F=kI!p-uO@j"RN#CR]ɢ'C]?_ RP8 4!K68>T)죨da/:Ǧ+&IcThq3<_R3$Tiڙ~8r pv-Pv-ڠTȶkmX+tOgD@2Rr ̉B3054FgvC {B:LI02/ K*E|._]84%s !L36nfv.q8fΦ@pSv.c]FmsO;PsƑI ˒iLd30g"0 3Qg"椷f6Fso&3гśVϤz[:w˖Hc=9SIwvrn27Rrbǿ}|y᮴2c^e!Aےn^2M06 !(隵gy-Ȗ]__׫7d cHEN$@g\{w-@*?Jr|?#ۻJ/:{Q Z%̪ʬ%݋KY[ZW"GIc՛+TE|W\nHfo]y$nqvR#{: 9^}z(X˻|rtݚptY~;,Hbf?)\##*l?lS޾}Oynչ*W}Q.d$h,) )]I6G m4jeGsǾh/JFS =4k3P{(^]m+ՓêmsŨ mO-uSX zR ' F!dtx˓TUIho\CȨ>d+176 F61.3{0,uvonX6Wҧi 3OL&V\;c6sxZ~%; ̣t8WaSgT9uMs}vA1@tͧޯwID FjnTR{G'u^>$]cMr:eԮZJCQ` #JwC8c 1-.>4v.0y7oz}B^N>*Q Mcb&MDMjW&j⵶1B֚/`HAPVYi#D$ӞHgf h: ?Esv"];ӞIw^9K]Oz/2jdF &K phOT;Va7-m3IitOPjg9W!Pzp(΃qv5V0aDgl%7y츢6:݌$EqnG30Vy^ [f۵g"1ChÃ֣3399ٌG1'Hmeb5]~8{vI{@CZUzKCl~ue _T㎗|_țlpv~wSw,[,_z% a~ X.ML{%*4WR#8_hgIr '/$q/9D,l[e%Ww߼y*_^ !-QML4q^/` Yre>$ktXv~.}uraE XFpsţVFD+47WW&tLkwe~Vx2~VS0r*d 10r3L=‡QK VM֗>b n7ӘyR pҗi4%YGϋ3<[O5,"\[S֥1R&J҆iwЇF]yϳq&$U]1(ZAXkBމ[ >%yO2[m >=l?l{C*bR:{fMHϣ ٔ_q^ro]L]]|j*XRz;6OOT;pWBor= _ўyv{uGDJ83${tB:!Q~<_G(ܾkϲq^).gGs|-/8l2Tϰ/y q.J%Y,!)oS&MNWqHw^IjT921$$"Lrk(*){Q!Ǿ|o{Z_س-:l(OU~G5RM샘yK]ភIGWȆ~TW FCNW!֌erV)dK2PPUNӯ==ln9gZO{o~Om}m@uM5k{<ȩ8PϷ,138H%=5fwzW+wg?'=bH}*ݥO^o]P+kD3'ag0)ARKC%~qz ש2|G_jz8YJG7M4\U*PNqxκdjv% kt/Ld8$9q/=hjgb Rg]+ϳx;m'̏|Z^7M$߼Vx~^%;hF-_*gAO^kkG/kN~{BHrS'/4ɨ)VցSx=F )gN9Cw j`x@tU'0W@WwmEO[GGOps۠ip|ZjH;q|;K(KQ4fvvfwfVJ. D<.wK|D P0  T`-J+(u1^"+V[$,vQd.y 6S,F&嘐xke*E̶$2V TVme-bLʐ̴VI^;W@{B9*|N.3 KQjxJM-d%WT:]]AeK*8Mb|&Uj+z2,]9vXVZ'pRcd=$XJaEahąBa]#s\o*րrQrdw 1b615׊96DȒwGN tcHJ[5N9#*i!_iMP/qh`yٝFr e',qYx61E'`“Ŏ[ML9Oa61@CU_i=STgS`7[4&P̨m/9X݊y^. .=ΥpKXw*۞;ga(dg@̔ ݂6{jDsfԶ>UĹ-#QyhF8 X[# BU]7PHQ,B+F!Yh|*S:o1jm6T+ r*k aW>pExQCփSB+ q5Fdo"\\DhSJGW  7ۓ+6u_p]Hw2) $9_N kZWüC5 ږ5mKߡ9rC-ZAeXʫvqgUqGT0u ݂ L',d!%#ZA$ӎ0}S~xHD"9;UDH/zUI%aRN߽VjOfLQx \"o Ṋ@uw}ԊL!<"V0 ô׮%0D6ƒ-a O3+ď HS. bO\O;Aixf~1^^&m~ 9M۷_Z %ղyGdMJg;n-Zs /i㲭W0ШI@ZVA7*L~RqU|&Mq3*Q´7%id:4VCHqЧF&D2a?,z߁Q9/8z硘PM :>Z%{q}X| 1j#:5Al;IW_Lp Ip<`k- *qA3)4vC,Wʼ\8~ؖCH2wv<%(L(KK >VIKB1|TlN%["Rќzh$_ޢtp5ro~Iƨ=>2a*G l@^dC,7?iwPV)M*v﫡ÚX/F?R (&h8_s+5r&Bi{ߥ1PsRSCQ1~ Ml0F||gV8hw,|M8v{SW)5%Ѳ1pߧq,5n[gXsD7XXAJٙTkTT)Џ,SB'6[i.AA x|8p:/%uJ I/H?kt,<,"T1w@C)@NmC0ߊtUJbkZq鸰 Ƈ]Ki4% ,θւUx@9'|4m 3)T)TgH- /&'ge fhӔJI+(@goKK2"e7ejм!Jx*^yՆ"mCI$-0T,JQNKբ t?l4'Ig{Q&;wFӽ`'I`;WjT*T;=I_1#}a;Y\EV[PNaDJ>Kx)?%)_0Gz9qQ:iZ"ߋ>v0(ն2(œAඃ $U*LT1rFB  o$4Ząonœ}UCu[,WKj֍U`0WM D 栶fP5.V+ho%Vjkeׅ̋X2HC쪶(̔iйWjv~x$l"1iZؑx|jK;0FHlWw*N2 `)}[INk8 mbK3eWtKwiN&*9sH;̡TYa-H+čz>xAγm%'t.gVzJGَJIGפTu !MsW89q_~/b6ڮ%מݞyy?SQQQN^E_>[s;/ą;eɷ` n8f8f8f8qSvdV!SSnd4LgU`ތA1e!A,D:1c|g({z/  }Og7 n|?m g'ܛTh % dB3 tN߿ՓDWhp^U5 `d7ȴdt̿M}ۯ9>>_L9zG J~o'f=y狧c*ěwҊ1' gИ'S . A]]g"Xf^ $8naZ{_?św?l3N9]4 G&S`:$>NoA[ѵpzy;za3? 
/;xs5\t緋?)^7\鎮$ H p>r\@zx> Buo聀o\W |xO0+xBwV9Q d2S .Cٔ{zèoi^B}6>pҿƧwn2Tz=|;=̲3ܧ=wxsw-:]w~ܿ{j8 x,5/C/Fplxyw=pk2?e zq&~2 O)k {̏.O_yps|:s/S/!{՗!xxwAϺ9Tɻv砨G)=%v.N2x ӫꥷ(oQp>NҞ4л)X77ѯS 5C U^dsI /ܙ Cӗ=#w5`R Wo/ 5qۮ&1BHf(ttNyrڌo@eh4|ի #wYk2~ʠЧq&t<"h1!6~&a^ӷfhӻ2n>͞ і~ f?'P/Iզ4߶;e!ٖrtfoM`)]޵ޕqlBij_ )܁〨5b"QQRTM6%ƈUٚ?DmO2zKo0o &g+!yd/ɯG8bXVRP1Ie1V65cV~S[1]oJP6ΟtQPhV ܾVh(Jcclr7_8DC)E2IwJAF¢S֯V5[b]@? Y$գۀPvRMY@bYC5/^ɲ/$Ѫ@_y& A£u>gD:RUZb0ֵC0DL}]zlBrf x$TvhDWJavqH@G}uF"zfdH5 \9~Jag= 8ˈcr@a:B6}:cx+~l?OW^ DwrZhgbZ:-fY{E(4 ʺ,( ['T(ӑ|>y1T r?L`Rg]ś;B?|dz:`S9gej<`?[ʴŗ/qT-8+[Ԯv6?=-f[C9>T/ YH `Q魏mM#O=AKO2o6:S@EՙJ/ؾ4گ9̧:|BFHpU$jãJðU0} V}A#(㇈ha*lHjE 뙌6gԜPNh} s_tW:˷LoQSk7H ]\/ex2r>PſT'd,|TsʪǷNaTּKa5X,71U6ux>Yz]Jp,9rAŬQ`s͕B ?s۫m1]MpcL(54Ѽ908iKzmo.1;)"dwD`kg)+{{J;H$71&R*Ms2M^KNېKp4VQ-ST/Wh):~zn4vOQIRNu=|  hsni ث*8ks7g6VmþVΗ\{Rp՚'aTF0bTw4T>H-$`!7*(*\s?FH@4D G; ĽuN T^cvT(|n-LOQPW [F]dMCu)TBu)T ՙi^{+TVb'RDiAjl/ -w!ϧ&P5*[SըRܭfalÚ@RNJZ*f yA$iX` EQPL0OTTr8 C(b!R;Q)Htsa&iH;js~𶬮H1"i"gEqRI0x UKZH8X3YrTVX 9a \1*,eAQПE !EfbV/9xM8h'6jXhw b9GJ)`ÐEwo nːÂw-Hf IgfiFT)ʤJQ&U2Q&94.e/la`,jG!V#V1XbP#͜P'ϴR5`̓Ü;蓎_sѨ5~Z3eiaGX8ªGVi]XB5ZہCijP=i{FaW#X_ԕU8K Gp` C$0ߑ2"QEABR17`1Q@/?GHx;2IaQVV86PdR1%GQ[S;kNn tcP*T,E.ANZtϚ̠X4vJ3xE‚B colM[za$hI/bԹ\`[\*$1īR5s]xniVx.@@lgsLJςl/۟`)u#t?UͤYu_aE4e{[Z p1˜;UӡIר)ю>`at7qv:?ר&\]+O uS  c3b調'QǴPOo|Jy}wswB&N-zvE@)fe0/0GIbdh/F,՝h;տMLd @lix.KUQTqUv%4hLoL0pʜ4/b*3jc=_ %hMsę.[Yh ۭl4e)dkm A0jUҐg%T_VXEQ^5_fDThe zҡ"NFIDdiuCmӚ07o!9Y2J6diC I bǤdc{4)"zG9ތ*Jw :sZ7jEm=~}&Fd!Jbh6#hGQch#EJ#q_jeZܹh[Av[Z*;U-+Qut([kY']kFq;~s/zhoL<+$Jy#;_cLMNJcn !W®ErXEZcκ"ɼyإh^*~yT5ɢ̒1P퐝m\(4ϊ&1HJ>7c:E 2Tw}{sQ=yOy=ك=سgIfU9]xEWDlsްse27:l~]F^3ξGs o )DjD"Me>U[*ē6OyK|Gaqlv!8RC9 K}T{[I\ s&h]}Es#l/:ARdJ Վa@JP[p"Җ$։ nXIg,yY5E|z͎c[<@bnzhRe?(bhGz)o7eC%c>꾻wc6z M+]woIVYćC De::0̻yK=! 8JSc1ՉtC2cvM{o>"ߖd#?7L}aG}3ɂv!s._TNwbDK.u9/JHח0a$¹f/2"#35aHkB+#Q_hR]l s!2t._)f m\zSWU~_5l!FЩ2Umv)7S|C?7S|Cm_ ƊΦ݂> YݢP5E^qk\0[%MSg6l,5x|өAuLSLeFOw&萔y r<<'->'):PSs^| \wIMm~Z~?2dL$: J#sz {Qd9 ej ,z$$X:PI,'`DqFf޺Q7'`ZaVd3X4gqPM՞)JУ619j%Q779ts5mNzn}[=?\np7cP_a>ZO9[|7g0 <& _~nBOD^}*J}9t_) aJ.?'=ǖlr|mqM ' ?𮴰= ̽d&20'c91Sxv`ϳ<̙c*J\Ym3un+ͭM5V\CȌL9,T)BO1R?g6\?Ʌ32$SkL V>RnWLs4*Xg΁6!Xr@ r_.|_s*5΃rYLťV2yPLOS\&FVbG"TZq*AK%g:`- PZFsup]c/@%&s0ss~Χμǿ* i yGW9.ߙ1: `AiDUEז ?spv&.܎&1 '90c4N{. Yɷ۾_= QKث>*8ӲE))gE,f-K+u1h1SYN4v҃~.2, =" + 1"gFav~iNblW;7B4[tJ>ø`Su\z ne4% GUiju  wuwq_1Q]]e qߧ3~ȺMSKQdr l"gk~G8]"5Ɇ1\|*0K-R4lbgrJ'G>tMdk]Ԋ,Z^NOZ c"!p 3ܸK)޽q ,SG|{XG2̀e;Xa ^Ed t<%zS4dmRorRɹZ'!VKgDW߾իA¼p\VʫWH(yJbJ 5ݰJY%:`R+Eg1ɓK، OB΋UR5ݚZ$X*p|)LVjةKnB׽սW6 N S+K$VrYrR-7tF(ҙD˴RRF۔tqfbnL#w!%7 cef|0SG T)%("E*7]F}0%3D$k0QRuDOkl 9IJ\w/qZfj0m:kQ^`b|fo7`D?f6k`-a(zfʬEBױ%SWi\2U `%8n W2zڈ <;3Ӫs䇂}]-'%V9+HX5^D:Ъ3Eb]Leq w_v" &eS *FRIK1@W_\gc%ʺ<+1ɨ|f>Ձ%8w$χ+GFʊY{7dIS4sZԜ] hKJp1$n }=iX̆^k68MڜZt0 m 1I.NFZɈ69i>G͹9q#ݡփc崞 l^&ܟO'l?_Lgz 4HpDw5U8tpz!%u#68#6_<[j@7ko_K)CDRlFqkS+>j[#+>Ǽ}7b(hG>r!wFBbs"\}_Dmɘ娝 O$pVeY%?tJkkm9H3O䵾0>;fkka ߏ56iMFVhd[_lP`Ue5[lIN ;-BL1P:* W _,r(?RIŘJ@UBUkc(b];I۹!!N#%̋p1cXL=oQBL1L)9 tOe _xN\::8RJ늎)1JF#"xZIژ4 =vO,qEE^ސ\ B(ѺAi}֪)@Q9 #`Aq=ރ7ˆVŊH.N»PPkya6M1ק9ZUZb+Lh$@œPZ)IA):]qqL؛Ͱpa0,vC88R2veIeի͟WJ}Axwڅ|O;q2CMT`O? 
by3Iɋ7%Xx9?WH q2cW5jHLCрUr7 g%gT"xO8gӯYzAH|fU#6',p 3ZR'hǡnZt%PڬCJuI- 8fxC6Zei$8^TH;hSuʋdvNƒ#hx'npN+۩I?ȁܬeWW䎸:Q^cmG*ʒyixR"GYiI"-PH0Μ- UɊKc|vr^5Dcv3N2Ao7w9Z?M_oV>T{[szuSepGLV,l?r^jW?9߽ o^v% mEWLaXqFXћ:\hnzZgHesآt (B}?¿yz3'sSbr]Vm&؉'N'X6qok;|576zoD .u#u_|wOǯuM*cyJvلt޵P~o޼D'^ d%\B b[툂ؽ=G?7OWAEucy <7o>\As|U'6KVX QȨWY{a+ ggVCO0Xo Os7cxb?z=4¾;|r{CP.vu9G\<'X1orlÿˇܜz{}TG>|gO|YS~e'zkON,';!dt*`7~!?G|a)|?B [^|/<27m^=w?0oqbyp7ֽˢyMdyv ~ُ ʫ7l#ڶ2wuj勭j'Kl%s] Ld9-Zb_ G'4 kd~ottݩ!APl&^3GRu|)6tlYu]J3tT>sh/kQ=`L a'CPW,fC*JM4亘u.&%B w(dҪօih9%G.Go$ zsy(nɪڿp_%X}b~%fhMSE|fu՗_N}7V#K?j <H!Jh-y׭-ěԀ%*h!±:X\Zy8}Zxӷ+Z+g8qC=2EwtkOW:WQ<W6n~{1 q[LD/l~`;8P-exw?A[_ۏ> 9]R&_ ,9.˜7?}S-μߩ˜7d̴y GAqٸ|q=Y-g=Vxٸ^Zi#բmܰgzbmBly‘и Ժ6K\Co)^YZ_Wߥ?C􏴟h~vO' s8|g=/QE@ۮ͛-ۮiFiHp3N{_ȠBΊDÊV 1G WP$Ol^N * çT0V J*5N>ec3 <7E>GJSd|U 'SO y!Q/4^FiK n FF(QxbTR # qbQ e(p|V?Gpr,^;h~@4QjPeZ9wٳU{RtS\&ĪRr,MsR|l|)r9S`b!Jޕ(CВb.(XD7S@yhB"U J,@s!6YF&ScgκkliBbd~vt'[h-:,7gֆc†NIE0I=։dCᥠFgغ6R$$G V;}$cRXDށ-@ YmqJfc,|M惥 ^-@LC'a9pqX0ˆ]KÀh;h,^p7*DFZ c<hۖPC,eܗoc)%CEƏ{v4$9&i-%W(Fbo0]#IIGGյ0wcSV/;h1 = Ϡ;ޝSoc^ċ9ٻlkk}-%:ӢVkqHnn X(&>b3$c"caǒ{} "; ,OA;Tl@3'W*,4t [ɤ4Z N gpwr%))#X{Ϩ$FqUJJ !MJYR¤▀ EY1FDVJC>cyUA(bY\̵`4JJte7/e֥eHahS̱ sG\a XO\A{[bRMJ^X =\Y`++K8,VH:jXXQrN1  x:u)F`]p)Qi+rg![V @F`&6\֒ erSC1eCR`vt Qč2%Q:s Z;ϬX/4'ɽLm2 w L#Z;h`c𥋬\0㏍f{̏n◾<OV.crJޝllWCBN!y(`yUiP$ (җ:YD0 PD)WWŁ%(OjFRj5u<٦,l S?=0^u|5|k$\]EVR=~~'>'9*?EnYMC?(ʉ[ >8e =2t J "0p0 B_j;_#_~l-K *pg`VJ/0{$wfſ*ku0w` sj/- $Ap&C5 V#c< +X`J ?0 r%tsPazWC>p%u=9 R^bƒ ŁCU#Z!>d.kr`/J8&0NRs.]uf.crQ**iv^ Wg.grl { ?Rdλ5aZӫtg.K"XWRۮ(m={8uZebm0j-~Mz |7wk/mڕw~ JofeX%E7E@+E6H)/S_o" )_[r2\ÞLh \jjsyv0cϧjGSY]B԰hպihGSY]B԰~K ю"IP5lTqNh \pV+`8X.\;d{T"ǽUK%kkB֧r{l bOU,[] 7!n{ckkc樖W/{br;Dl_.4{$h$:Y I֧a-w|ؗ@1cD=Ħٳ\Oz`toG6fOT\{l]q&)VGLM SyclaUla|)ooNgI:/]]]I֙*hx0l0S 4b-־~qvj^S#"jׇwfIl!l9Ak F+JSXؚǾ[ D m;E m!nM[{:v}T/2gڤ:Ԙ W&Lt5Gj{Ql>1[ij7l?/ܧHZaCS*b8J#S!1B$D[i K)r0 NP^rRf4ajPɈץ g)j(lj\ç1-_w?C zJGEc\,NTD᭾A9gBV}c!S ߂u!Xea $YuE+`Ps]Hkk L85IR0,x[bP!iF}Jiaҳ{\!MQ!g LO 5п5+裙.o-Jcada~^@|!_F/ TZY*11ZYGM5slT`B1kE |V9| W7n)x;(=LF 1FjZ9A?^Mߏ\[RۡDIQrF& dTyqo\'*a2O޵q#"Km,ރ3x' vt0fYK.g-2ӺVE*:|x%#OyyV k 3sxEPB'% WP^+YS&Xq=K-L[*JRܗݒP|qxWlǮGGG|\|H`l+Vz0;f "9$$)ux\K2@d +$90p&UF%Qd ,əMIbFJSKҪk(ͲenOnzrLZGK!8Ika3t5e!⠠ܤitCp$XC%$#t H"׹6hI&sd+ bs^J)o@pLND&M \`pLSMe]ehI5ם:m?ۗ]3ʛ'+MBŏ9T#Av-59E݁K/]W!٥~V,v6u]jk,q̏_OF$젠{7}->_z~3̰e u4 X{uM!>R+G$ڟoUUy*8O6Қ T9ŝp9c48_2sҘ͔#LPezvUARea<\}?+=ѳ=-M$cWLTihAr P /SvEB%T|Y.p1* !yPR n6{y#t'wd||.ֽ|o÷-l7gهޮ|+X_jŵF<7xo~\5C- ;~G\P^ YNUzW܏'ӥ [Q8۾QJÀ|7*87A:myl@(afF uq{X"~6a_~b[-ǁf?|G#, t N9!0e"D+&E)!aCr Z6vj` T5BJ[8DJQY aPWYVT2 -B^~K*iXXE|G|y/PϢ$EhfYXk b %mJ" r.;PT7eozLz Ul%@I2S [ M&LH(kàbu nvՊFD9R2c`R0~ Ƃs;I8%VOfQ 2QMq"5lۍ.A td{l?>' 4BMM?_[v}je83'`l54ߥ?hyX1ꏣ pTWl?xAwW ]i@} Ҕ T>ڨ_ϐ Q=$r/A~L.̀7vǝWFm1ršN*tc}AX9эgG&,OBOʃJ:yw@I%1;ȳr>ȳUl5yxD7yv_gyo/D}/v4m{;h+ײ# #ql,;#Ŝ9֭p7uV1bO놪Z.ʫL0n`dtj.DŬJ0jbn5p(8Gs_`\@zh%l(JDs(P9z%xvYѳk~mG8 `gOoH@+k <+̠RisEOç*"޸RV"~iwÇ'a7k {/!T|!iwnb؀<; .~pN&GCTv V'* ]ǛuoI,ݗAƿyVU#K1蹝I@B;GЯ83QbBv+o}*(.7)]$V ADg*LXSyt =E(ġS] ty*(ᅰG60sx8Q{Xeǟ&^Q^{9Z圯ۇBv굂owA^H -VpaN]n'vovPG9[{mt/F:z7#g k߲Ee.-F١#I /F|̹_FU# q+E _ii6heĔ,R[ ;/w^ Y.L&iX_KenV~Ef יf9\9`S*S+ UH-v ,Z Bj!f#+2*, 4s$u R*]A)R%Xyn5X@!* d//zrnDz#?.`kp BSme-\[KKV=Əe^~_9a7~5^-~5wW˱oNR_  WA .޺֩L)^ٯ4%gT=h$0gooFkwV-{ .v7!Q sY )vZK4X«ꨦ -Ժ9vLt2k[j.V-{ԟPѬ=6XѬ # QcXѬ=O=Tk+A$yJb6 ) tJ9 @Z+wz#ߢujm7u (l[-¼nLټ~lFT@[}6VBiu58 />QSJ-s=əə>pjl@*^_¢iՒHlBu<5?p4CS!ɞ1VQ>JQQJK _JtgnZVfĦIvx.#DsRRo I57Zok1˄[:A9滈8/ɫp-Kh/reYnɨ+R(c.ѴuFE4g[,yFQpo^|ch͉4wtnR:d ɉ+·BCj9u\eߊVLm46=QFã$pJLrZh2L'&7%ReP<~C4٢QB+^H>t=&EyN b dVGlbʦah-oްcMK~x*CXd2W@x32Y ;EG tFSoѬ\@4K_ ;v0uW3@\AKzp8b!|3FW'hSiqv\""tnmUk3nD"Ѩ-b8b7^'H; 
!!z^jo^<ﯿ,.N%:`e–X?~1_b1-iؤt<-3ZYxB=AJ?fwJsZJn9A;NW~>|x:##%\^lfCDSJJf(=SZ.C'eOꕉm? [|LheEIhS{s[6>bҬ_2_ë}sBZ4$"kUƏ#%\!+0EpD&)O%xgΥe2eB\ YԜ$E,ɸNx⪐>=AIg-nDJ 0jxqy 'A][3 DӆzsXβ=J&vpcF,^{u6Ť= g:4) eZ^mxգ+PJMLj"' rW p PT.rWSYAY7a砂}j1Ýu͞|> bp88~CާPE,ȷWCQi3,u$&"$} \̺-J.GY_'NH'&9^04螗<;[BDBl8^IR*}ndq^R }l}t<-LiPV.G3D_d)qo);Aes)6Kr1DLi~\{ rmI(Qz4p1yhZ?|ڣ5Vw& xzaժ3XxEFCUPݵZwy&aBpazMvv U_vk;=Gn{߯g8뫟.@^m <Q„fo@ WB\Ёkjw=wwnIƼP˵ǟή_O"DƧlw.?0BsSdr|Y_\-&tygS P @qgYE]XkZiM kMKhMX [z27\+)REӔY2kAd(/Rl2e(&#M;)3V3%ʤy$^Ќ`EVHKX<)+0RkTnoA]y Dxg~jWxpya@KH `eFf\3F2[(-@6:P*i<(/}$}R}L]NJ69/"CDƳ R,ekA>[ij0$/{"Ia{C&.v7 a…th,u&N׹0l ̰ZjϥEnBސ֧ SK1"L<#3Frl5 STP ňSppSbRԙֹ4# jǷ'4#CE2d,$yeybA+)Pj.8˓jcp# IQN|6AHR5*F ,8E[c@)#žIR9s%B9q : 7\\0A;GvtBÈR^0d}yv lHvᇭ9߄3vO__q ~q?< Pw?Uz`y[j/s DDHR >HjcO_n7o^Oqsm.]{'u|~_ xXElg W Fj/iv- RHB_m=>|_cK *& 'Ex{x oa' ML.,JԘL9Lz1(Sm 5".8Ew0|20(im)Å3!9! P-g.A劳o(:9`H^Hxm$HQ|oP?m^IWwݧ_pWXۇ\ݻ&"%D^&on]&Z~޵^ R>Ń' A_/~Ң7oa!{Юƀ!e'G*n-XI\:k"k3+܁#Etpzw RK)8Z^Хڟ%$ ]$bޣml&&#=9ʯƸثp7P|A %Cxpѧ4R*pjBfQprz]_s)꠬$xuA#x@i H2b5CQNwwGѐ!E L X3;A:3Fѻ4C4 i<&k N8Eo Rt)ũkeGoFBյ^XgOpX by8]RPoaޢ*;<ЮƐ!zl_]Lޣ, ܡOs>;3ɈiQ0d0 ɢǼz=ѠJjFaJ1:5Rt#R,6([V`*nH-\4#nqQ.<[!Y 1 *ê E=7c8JRRhM 8zV(b+8X#t 6R2Mgt+_JA)~bx=U& Ame}޾]|j5@ѥwzRs%Sȭ5k;3N%*OLd;{N-YYwׯ}DRjk?I MCƅ!cUHߋş m}ƀ! zۧ.r@}-&}u^&kyYhHet O =?s=gn) ƯvjiN%N1Q4&} Q@5nzMƻ.dqsB4 ?{dRspS?Rw~eVΰ0?Wq޹A,n:dzvq҈#r6͸p=O-f1 /R37[]<]ަyM2xWJw|;\tA*etF`Wy]KPv]tAap攥+[^JE;F]h),* SY"%.vWBEeEًʖ -E4/1:Tx#Ku-y*R2"0΍,xlǛ -Eʆ,j8kG JV,~GcpVpaw1YU t`xkd4\'xUòrvIged(6#( |'8".r.mq'| :YO?cɃ~ZÇ|*>t'["˳;kvA"*o]dq<|q|71ëby?\9y|])n[-NJ&XiS5F!٥SPˢ&О5Xlv uEg2 L.o?\xyzdp>;տTHT $ъ.RlU륔Mfdc"*Z,U/5[(-WOo5#F1⨱v*MQѝry31)<=ܹ]oӻ:-q H;:7^;ZN:OsuB1Ɲ0yjO@1uvػ.ɭGP]<97NKqО_EɝIa-9pO{yJc6ź|KHԃ6P i[)0eZ}ɏ[7ʏ;9 }?;ۻ-wll2|tOa)78)Q_m*?Z w:T*IgxLvO6̠ 8ƊD5~h.tDAIl ݽӞT7S g`r2'12aTD %,9Ι *S'5Jfٹ98K~w×o56; HYIWTˎR]n9jF4Юo*Hf$k7 wpyMKAwHej@!F.qܱ)!DAؓf@zеOB4'9-D$eyH2& ~l}7eJ7 +oSzt%+jn3KG:{>&xL~4WC8l5.#aG.v{齃盧:nޣd;O cSşױ fuPˇ f+zmo |seʑ}.d(IߍFZG(jIşcgh ըBdMz uݣx2e h(`8aܨ$$946I8P!K@5'#fH6/z,Jy9[N-_{c ô)vugUo* *'uH6^.x#|:9`&S ۳j<~|8|XR^0#쁜PUL#ZԈI9Kj,yF܃MhY ]1t:=q f6)hpO;Z@=WR֓;RKFO޳`8P9м$&;L|wWm_jxqS;FMpP+o8 < %"?0K?{Yо!ͮo)99u)ƚޔxtT_a|V֣R@Xc߀Ѵi#xu"N}Ro ;)/@X!r֊/7D9Dr! $Ƅ=O-'=. Q;$@fM˚$鼏PT50{'J7HR6EΟWjЧ8Ab'Tz^:l(^d7:zuAl m (L?.' R=MXc! LP!c]|@c#GOs@#cZk?p C#}80P(ǾX !)|s@w/q >yt=~\.2|Va"38#/"Pk|9\RC <ucK}jcyZi47*@s}&rK)ԣJ1> @b;JyZPNF`#@ڰ- edk-Q\RxҠ$sce8#+F}OS1EVVH@ƆR0PRy.eӞ\qQHG W s:O`rmC'HI$g2jedA;0BdD0 b\%yK$s3g''Y 8匡z@R뭧S,Om%z_V(^yDέI] 3{jFɗ IRK|?_\LLjR렻렻;f_p3+4H` \ 4 i&A~A!i۞:\(U0ͩu+t`OIDw'6i& :?&i{c8y ڕ?X3řL8 q|Q|ч!#X0FwmE={ W4˼s+}Ƞ@^utާDZ^8SKa]P^Cӯ6u S^V (}cJr ]g GKY8\N2j/c=f;f+}2Vk^T\ a-hk"vS(YOkg|ʊix\ջ& 3&-Bf:VВͼ&Bn.uVn&!QiJx8&cb8bʤ|lؙZ,N-QԉF~V|n>r&oPԆc6qh_ق*>-ۙ6o~ak05ͅ=S,$GH2Ѹ+&2r.@\S1Uq{.q`ԎƧB !} Ҥ\0&[bMjtCn=bL0Dl 7M+_ &\_2 #8V 4%cKT9G7S \2\s}BЫX`R!gǦHSЄz"gӃ])x`[%`&xA`Un̩n:TKIgJ7bH>& j$<(w&K3+jiϵBXxʴmWijO.Pf{LVpe;AYfևt};5%IYR8 d<~ ;otbG}LSN )2d$o'641 @JQFfŊWYI=iIQt]We* 3)1"[Y(RL<Au $+VlN6W|֭R+b+ikVZ ׵ƕ}\6=1Ebə8E^"EkH)=+DxB^t\w{`[ ]vrr>YL|…Vx((>kwQ6T@!Oc1'' /u+'nLBN?:+6(tS~dlVf2-W%C[L[+ޭѰm,Pv큪ߦ" ޼v}E^w&nի*8J60@i]xJDh i ;PV 0o!WlH|E4MUNٗ,V;WWkVr}X%}.V MQ6%'X7oBhU ugϟ?>Q%wBD8=OorPFOgh;K5UCW6(gDl!{RChV'Ng>uvxtJ-(.U֢qkdcEwZD%yt2DZhJA(fԁ[=QB qggkģ!lFy`}-1EeZ<%}qA[\2x^D5_g). 
Ot)!]őV8w 8XG3#6Ƞ~R<wxbSؓ|毢xF6#j#G~l6[6i .ZbQ:?F<GQ캵(w_(ޚ 8z1 `njG#tEm&oǛ j&9SM|s>j?-psAbRU.(-垬8&%m:%bR"BAJD^2' }BD~tԱ*p\}s۴^6y;d aӦ &ϺN¶~|9;gI(Wgo|zr}NeO^z?o߼~vunxy"AOov~/_7\uկ7^o|=Q$_͟0NO|H ٫0"&Y>t;/L]~N[c&^`?3e4 /Xmދ&TX蘠-L)2[>f̚nIX ,qP椓p5SGgQ[7e p;,o 'L̅}U~ ʶu6֚9سP{))zjoxé9QkZERXԱאrՆH=LQEjV> ӐVl{I>Ne+*Ge+*UlҽU(9Zk)dJpwmJ~YxS %FlzXr.`%Kn]Zb-ɣD-v"YFyww$*$=6NaAF:~FNIJ9+v ZyY]gG4f[fd֙YPUf_fHΆόM_WDTj{ qbkO"ʏ/Y1ޑє[r| .Yj<;i$FzbU arW0C@aTQ5@P?@9TxTFJ;1?cw\b-/\ލ~ q\>U6=m;5s4\ֿ_ k/U7w8f #QJRJ³ .+JC*n,-~R**PPzJJj጗cSƲPPU;s+kKbJU4b?oK7YFm]h DaJ N/,.^ D>TI%cLRz˃DeTYfp*] hSNk цNz.E%8Lu0؂[s+' UXQ`4qD3TifPccO8f*Usѽv}hyYeJp+])M Dw͘~z0VNd~ p( (Tc© ǩ MrscBeAh'΄ߐX|@myO^&g ;R#ۣP]\ngԅ1-j{\p:3ezx{l+vls{PW K?%76E46%ԭS٬m%vQPb5ɮnC3juZ!5ԍ01M,INM+uSLJ˛Ma)dkuûQt pc֌rb{/$R~;%/|&1 h#Xme2hʺEOZYak OAbCJd+xG$p%P-~Ju%*M|6Ezue3V7>Ìь9D4l2^?O09aPLFԿqRD)StVm4#J3 mJ 5?#"00NH7݈NiFtzqr{i{K.ЙlnM@-2VHi]>FDӗqI7x!Gv&{h &Ϟ~J;yJz854x&F_-BrRHUߣ"pDfHۊ˜(l<$U;3# CW+2&ZӓrS?a'1sD_$Ţ, u;ihs/kYra;-PcDjs1+NS(Z0$^+_yI gTA½H LGeYKv]2NhO0sRHt+KjG ub^ww]|.Z8Rzs83@rㅝfu1c7o}ݗ0`a)ɋ?P׻@yDy~ bj8 z,}li6$"޳to柵= |vg)ɲ:Wz9;;``z;;mYj  ;1{4KN~14}SUz{J5j|;O~BJ»G }i߅yJCx˩iK1o×i_&=ܔtm9K]ܕ=Jݛdzh$z==8%s[Fq >8ewO܎!-P/ɸ-K2VJbJDfqiby#iˇV( D.n{#{J{Y L!a] {Qt:qQڲ4xR;7%V4AW<0AiDzigTK,`ڃgRv 9e""QFJᵒ†ĵOi^Pd( Rv _ld"Rųb~*T \JZ'8<jv˜#|u"pLN-d8e/!IP9\49d}¼ctUw[&|Kͥ͑B) Ec% QV8'aʈj'>5nbY{ ۥ=A=tkNH8.i%j!Y)jM?Ywj߲,pm0_#Ξy|/Qd 5)!髐5]XQؚBf{]DgKS*z/ocN.rq&,^pG3̑+2R%1C'jj1^"YV8S(sJ+Hh}E 9rp\mMa9 X5kVh0rM>-m53L+IF&b F27=;8%A:R*ɏ^ al(-( D{]Qh!q\l(2=]_WZ7Ttϛ]-S.0nvŮՔCw, _V31:^]fE}u"B%.t 4.\A׊Jc]f>[k7Fз$f/_YHb8‚_K_$q%Hr *'Ëy٣f#m ғȶX0Ȯ"#gVK*H5\Ÿpq R B #L3IPXwdUD+le.&%$WN|6(;͐JAd"pN]n3ϫ,ih9%w7><?iT8Y|~nE8GUֻKYZv!g_>\xbU~|~B1nSm9|Rq*;/X|G762ЯݽKE3}1" 2Sx$b3JXGo{6;JD zФތS2|jjJQ35mF GJ).#(=ݨZ̝><+5vkI-d3yX测2,Xt՗UT j%4ȉ~+ߺ{{9E#oӰ(z{3B>\NYE}5Pf"+pY] )]V$\k+JQт+ Eɵ.QWT;&ھmʫ6fH%FA.%R1Ӣ *30fv!Z_ciKyvrڴ;`Ik٤M%qN\/FnHvc-ӗ4zA+II1|}Sw6}BJbo. Z<6F62 bc[QhsUj3vWgrgq7OŽyZm'#D׽ߎ!7*<9>ob|055QgN>2Wf1מhs3V"w][o9+_ vaR$)g&쾜E-HrS-[ ,~_XEYTw+-mѲŖǝƃHxX-Nv@{u]?F7ϣ‚iΏ8JE ΙP >NZƾPt~ޛۡNFVN1(\"êꆊYܬT7پ; iכC*Ѳes_A$ܱ-c^knc3^aZv0|7kǠ"FZiBGu^c}[I}`4:;8}Abof)`~|Rw%XovԫBΛ>)2;@h\cdVKUUzyʔ %XM=Oݪ@X.4:L5A@Ky>_O$؅4fdÿ%C+U~hgk[+HxX\~qyϺvH#רNdJb>_MR?A n~]]_|nTBf.3סbR@Қh-bKwV;7\) i޿טrUL41uv}QO5|7E=Y>s}"W-ox*p4Lsɍ^}mۙoi/i٘v!+rlgZHϏܫæn37[~k6~ƞ`L8^ו {j\7Zq(RϒQ ,Fm]nUF\:\|t#w>OYP_՗a.?4!˟,e1<$!T VIQFe>2m7\N&%i^zW?Jn恚Y{ա\)> Ɛ^hJ^yj/@1 e vta+"fV(;x2T,d0.£V)+.pVi(#Hy\^ȋ^Xw||XU$}Gp^F*W_Җ6(-wGԭs55is{﹡ vwܻ靰{ZUZ>$UYF_\Ƌϟv/lI[i"#gY4z/h;s)-.m$Pݲ}x>qE]~5Z-+оЏT#ZXLMX4ㅪM0 =t\MQ(&~0k!%VBC[ -ؾݱu%J"FeVuv qOfY>bEm2 => I/D>NεIVO砢n$-۾ mxtyj -tǰ*Jb{'iK}aN-$GSXEaPs.c|ߺHX7jR{ąbw\;U G +B=ڃ$J<#3_.N65N#WgKLgUf{Rrm?p9 =`FWdF2\mKǁIL neG^] x)zh0fGiWECWw2נ kdۻGK9ɐ s]G%\./v@-JEx j7mDORy˾?QjݶIm$ńPg$[Y|45n3%Me5|!Q a<*6&+!,fTV9\9HҁemU49J7ըF˜GUHI-Vg ixquI`+[Ȉ9M2Gi–4a$Da |5a=a0ѰAhW(X YSCL'@I Y;d<]Zhu}B*+mFSϫ /`/p:B!xa@3xVƫ@}A:# BA c$@PI0B$Y :+Bv1p4VpZr2 hJa;o eT0Es gLaf^8,%ykFJ3[E>:q X %iRFnm9.YU@!M$H2r̕F/ɸeĂd NgSN{N@C B>h6гjH*3`Ee=7:=9~gz&i6}~d7G/5yy}vB]Ă⡧|gFdvPNS(f^΅L: '_nF#zplTjEARٟ$R͸E'ʟ*Αbq; OOOeR΁ @"Z8 d֝ FY,\5 cFjʧNNJLOfKg'ql"A[UH[bgJ$4SuT>LE)dX21-iϸwQ)3Q7*iHEUQm QeWiZ[nv2GgƐ`3u* d@fV8 =7ۈnDMXy]XO. Z:JȫH(!=u,EPQZ4COsjԸ\̤H o | RGRrrLtAEDr$dhbʹ (<:gI$r.4JkI."C 3*5Okfq[q -,eP(#?WӇ|"$-VZ2_>4We2DReYc (L `x߆7W1&h`˰|1 )zT~zJ5c%U1E+3,l7/Z-<},Z, Kv*reE[f82@0rA8!֥<oyZv Pz,#g=2ErF$K:yl` #s".7@7F [M3IEUy^x;qJ"@R0MA`z[3 8nz\US&**S`" : )'U^5YYҤ)"MZҴ%"&o 9Q21eI31ŔDQ(.I6n$"]G_ af,2$_4FTekޙٛD< E##/wqFWCqŢ$a 9,yb{ y͒ 1= |Y4q΁Ad($3<sxKqt&)\oJRꔫDfLc*To^0L6+B2+*\#>W娥t '4>nyTˎ9/y!" 
nc:d>%O糲=3T; Rh,msiowR1/sհIY2TN%{ /gTI9Nx\UH`Tӧ.wY^>-JDq'P/TK3'ƐXSyNm4~+R=N3ԣ o.$jk)9|?ղs^EJ )u6(\ q#IQ9YHM8GKi`g}̛uJbzE0 c}eo/VR j-zi'+D\LD*/;Z]ٙZf촆9 ${cs_&[Cy(&)xƓ]G<]QrUkەخH}kj ]-{/fC@mIg{-osCZC̱wg1h?^I~X~|ܧo,< ϒߖh'Ǯz98zN*xŋ,͌m6=Br.휷^bqDI-QDXHd՝s/KtQiQpx^Q\zܑkd.EM_?,>huiXHJh@R}8w ǂ~+5 hB)- DZf8&dfȮm ®LN"vidpyRżƜ`s*Q)Tc j,Njbi?"r}>;:={IB}thľl:RrBgu`wL{ehG3_ZKz76Q{ZAZYqԙV!IlΥEL`gԵfXdGH>'0L,Q"p?az?N4=K?\Ll~WЌϿ¿ȁc>21z?_&/>c)y|xswg_AZ<&f:zbư5%ܒwSt8EbP?\jRbV*gKkFC޸){w¢ޭ.eT3xc kڙw_)ݚА7ut }j!Qt|2O&RRp8( -H]ԖpQR٣i:TߔS3c31˓V> _[> _ޭ!5,JqWQ?ԃ>p?6P|(>  ;ۺOR [ ;gՊ2<mV&%FIH@jkSj/5)Mi5Nbٺ1ؖ[rP;Tc*r›ɹэo K/ fZlaw"FDx|O?ǩՅ,f~wsqLFj*Wg_M5_bٜ/7grwfFov&,V"`SS6H خRkpm!Mٙ L0 ɝ˜qF)9"GSAOJ$4D-09nmp _-e;FԂOa[(0BĩeTX`OsO,HXӀ,%C)T t:!]6xC2 jZR=A!=3BSpN4-TIt *H̴߫4ʔ %i`8aܪdwcu§z<@ȏu-`H<Ž 'c`b ƦNJHSV(zan%t^B@ d_L@n TpL=s $@b"W.V%対<hp6%WIz#.)D )d66%w!$~5 efPLDczRmiŮBZxvG1lX0]=_ ||p}u$*]b&!:Wwތwߞ^V0hý?z W3\T[feζ:!>^QseTԥ$N]ILM6\͊?ԗKc/h(ǘX? sE_OsAB7S(nE TP[ǢRUI1.Jp Q m< ѿK3\,n0:iFהb!TXNRj= x1ڊ@VI%)e)d`\䓃( = `p{D~BI 8&h敱CP̮:AEˍ_oA%k"_s T }IBmkh8Ӟ+.f> 8!oOGԨӸjU]]=p .|wa[ǧވi֟ ?}~`9~b ""A$6La:blOq3@z'Ϳ_Pܯq2룀yE^Z Os !4CHl1cc>wSpGdz쵩K=_@2EQ%Zq3$hUYsOBa(ĽP-L-g4vԒv*o‘$&h0]AـFgrOZٹK!CҊ02D\;(h|qHJgk6-v9E5`x+ x\ !03xJ(: Rjc $Z`4ш@0:Iт%?y}p%p>Ա^iLk-eM $a@`(-S wkf +g"/}JOԃ8%RPoSd؁ H꡴zEn(#9ju)$(uYIRe"@ -3ty\QA3]^% a\GRpr'?@878w)$ALPI+\",(2Ս|n{E QPŨbһn֐ -U7ԝlCJaLMTW'I wfKWDs[kZ6{d%CoOl5We?+-l?R+5g^'V1 **5_GEppIp%BffYۜyq2*_fnnq}t/UzlʬIHoT*LuJ Zhܧl{k`f/aTw5^aҦC!p;k/?֖º!E;BSF {.(W: a8a,tF!)L%Zp;[*@'PR+DH[j`m,2F 8D„a.$ҳ  ;L).NcJv^a2#x"89QKF}/cWKF%&ӾRoʂL(1 6J,Oqfゴڪ`BΉ#T,s2M F=ի0?A yC]jYBOQ'aEV[bct4!2"c j/yqfAe@ J}\u2cL[xQ6x 3}wq&q qC@7zD ͊iߟt ۾A޷`e2Sg;0+Ewt5($˰Aά|X6)elB]MILĆ-cS4;ԎN#Kl#`d8ssʀλ( ,hI| K^ *ŞC6Ԯ$'7MUo?.^}xԶ*G+<6iiƠZwEQQNhX8&:ZĄ8yqyq ynkġ-c7$tS(*C2J'-qZBLvomjQ[8@eK8v /hKȟM[1ר%O^B'QA}r<â5~mv!On|(5R2QD%.]uēBkZiٴq-F$]j/AI߅&Mj㭱&'#OF#˦WɱRZ@} |"R܏VY/tV uWgtj̴:hk269M:9h3!KIƣ}/z=wtˮ8a{4_ojM}b P!ǭc7 $so7UJo#T[1*MKu%bd4#IMLhTbrY6bKU,uRB9l eqʅ)$9csm \#"qS2H3F (`nG1Zf;Y2Z[LAI")al -d&ܻy]}Ü*ՙ:78r')$a$eJ%JKd,K22ƣ{$%H1' WM}*{Z#\]ԅx2H BDzÿHKҝ:XCd=34+Qki /ޡk~*5x[MFyF#yxk> >iL-?*B*Z,B|Ow8pEdOG殝ތ^ 1`"A$)_}O?ܼgL,|ŴfaGa+BcȼׇBqV哥 Y+Ftdqwl]C߱q8U0;[S^.MLΦ֝/ j2ЬfVd\.< *"N0̉*ԟ2TȜ*J1| r94HPF$j)eCSb L]Pf4%ɨ4 ͜=c BS=tN[n3VPj;3 cqZ@TC֪aȬRZQ\f*cvXgJKU([tLMq蕶, 5Duӛ'jJ=jP 01/f`.ڐ!džX,\&CX*Qc󭢘α\1Ӱ Ǎ^dT zeHe9ة1DSsihȄIȱRd"΍ZHHP#th"V qWhˉkIiBvXS1l9aV̀HC+LR%7)mLc0Sד.An[*ـK19 ( IKg^+DŶQrzZ~+CA.*UdS@h]NH2%j8%8guazSUk;/rX~ͬo|F)xO Gn,1HiHᶥ[DC[+_QkTKE ۟j ܞ2dW@;6RЙt k+!96T@iu L⭃-+W`]lۃ5-Ɋs)\4y1YTPr$}$GpjZPmB۟_&T ($*07 HJ}QzohwwW)b[m'oXd zx I)-_Uv*PRCae0I_礉*,i A*2^6tnxvqQi{]wybQ!>nmJFG@DĦl2'I(QR:bURcHFITܑ:4 a˅A)&TɜLfI,76Gn˥4<E%D)$&1(s5.:v{Bc-$ǔbx2q=8db$! EZ-zGiI)6!2C(€pa)Hšiu z]eAۡKfea=,vǓka8GZ78p0jRS8Ű*)bc{GQEDJ[$#]ӹSnSCKSO+ՔJp!T. o UXO 3sI!|)>ޏǣC'ůBůwldՏ '6`XIIFCqF)VE/}± =X<}d8myI@̦"`l1G Y]\E x)6*5$kaos YRޚ|;,&3lv>SKdۃE8_"u̺޵ֆZDiD`DU|^^ݾYK')ϧY䘞W8C E@Jp!bCVFN<2a0t!A-(b]Xîx{r"ˈE$uװoμ3Gnss[1[/Kꍜ4q{㼟?[*`րM5褗(9Z2ZIWNFDT B0'|JɨT5C |D1$=îfP9%+ NGÌ z{-cE- TpFGgb&Xtdknu5s*`II/ ϵӭ#rL&.T /kQp3DPxt;YYq}ph D,ߪX iTU0mFrB'\)L: 20"\+hZHjj6L^ nIf+p?q,@ާ1v>1nӧxyrצ<Jaxz{Wp+T̪RUHGrY6H^T3JKe=Mͯ-, .<4yH~L^oGs?KtOn=E⋯Sc֤tȍoY;=L0>t6Qzǧ\eW>scZV>{zLsF??ټXTآ(%[qf%V.Z\DɔnXROb%: QK3PڭqxZ\DT\\J򥮌vηbBB6&8cUx~,`$K`\:[F}M_ۅ6m^yQBbcFI $nqa'L2uoV^h(IH)mudXdoo\1QJқ, |,ЁBc%QbFYQQ.g<[Q:`3ӽvk:\1 fE#N7ﷇk3LƈmNd[]%{Wp" a>6A? 
C#8$.|BV>1T+xÔ)rf-L1#az8w6˷&v?-h8q㸰 m>mzѲv!Qŕ,ff ҍo|VjhI9?EMSZ䣥)WCC\Pm-3XQiTGpA3BZô[hL8<+rZD "Ed.m$X8\9cc4 b#n£FhQYVBI&Fn f^Pv 'T:UP í:8[hLXy-VRD$Mc s(9b&Ŕ"21- O<;}#נ u9DEgA*>_Pc<I\ZLW#DE% a(OX(^-7ܒk 8nsyJYn57|kB$X%劗g6F^;++&6#Zd Ql҅a1g!EeM܌AׄfK*ֶEA-h|UY(0`p(4U[kK@ ň 1Ɯ KAy b{˹X7|?53E]EFT2 4E*-eH-Ȝ"`ȑƀ @֠R0;U4{\s元Q574 Y a(Z`'g9!X Q,ǛZ-(h܆Vx &-kn抯y=l1د< Cͧ@u]>C*Hjs25}vK$zJ ;R;|oX.sİ η#c&ZR Xa5M%3  7(g0g1B F~yrTk(*^yMb]L*S(pW Kle&DhIC0m`EdE!NZ,JV tD{~iV5g__ 1:df 3  GUl7 kg֒Kt DcHK˞"2ag&*BQTjJ'[KZ|Xsz&0˼l쩁p؂ip,XByi0us/+ad3}q^ЃNGu񄪾 L溋ןlT|D{ t l}7:kkH=gS_urB_SYǡpb lYOnȮZ 9kyF5jGD 6V,뷨A.|iOxPTlه _6f 6p Ң'3YS4W֕d 4Egr[9AW4z;ʇ@[ ` {/rSX=ryՂ!`(p£$&R؃jģAgdv(2fBsSUAᴷh%ؓș"/½ر{[:OQ_1f*84?L~~Ź4i'}%:I1Ϝat;{&X?hL#xXz?s;֟*~l$~ Cz묅Y!sL)]8z^;p4!qD+)Qn ݽuU=[]jbx 017#TwWoO~C͸ArcR^91Œwpk\ YLG ~kL;|߼#9:uԳ^3 179;Iʜ6*/d Qmy.eDN-K*CVTuq<iB6O'$P J4YK9&MJ Ie#~rmx9BŔDMi.JhOas9<+!qt]skЫQ2nlj"r_tuM)t " 1ה)A%׼cQe1Lacp-EzeD`\EauƆ<37+0VH+fpPdUg5Ȥik=_?w?L*(zl>,ArހP"5b}х9J/.~I] iyO¹J{(+J;̯8Z8 }CDZwy0w= $'v쀱ugop07y.4J!{:FZR5PGGHge*<#e@UyKX=bF@>{Ƥ{aWdX 5GRk4vYH%mMy9X*rƢgZ<J 3s3 m#|VK&+m#JA_ff}x {Lwb| ɲՑ%K$tY,աR"H,*>#žՠ7ػG||Y[;xܽ{X7 Li*`J!qI~4H:+(xA?( >/rN Nt\x0Sgi`sP $jc E ۾ڟnvQ];U ;ةNjDjlWitzCϺqs 1BVbDqA2[XX˜7}1qf@T bK7EZ_3/oJ+ b| (.AE qaA5֣Zv<=y ys<(xƒ#d'0F=> ƱFZ^Yc7bi4V&rBdYo5Ѭ5l@FA0m ?w-i`+64mwhh*mz2,Ѷ?ѳaot0cB '6 1N;Ⱦl8Y6.dM|h3Ckh:P6cW@g1VQWv7 wZ'}2Ui-%m%hX ^] 'B R%dHTYsMy'$C O\eRxY,D쁛. jga˲`ML@LfR!CqS ! tn/)RWxqZ A²/(^ vJOYKIxKD B02H'"9uO'f0RYh"H / G`ļeEv} }FbBH4 FB&NvDIsbpٍ,qsJ{>^RMdv`M{=盽>[W|Z|]_> yȧv4茆zVޯAF?"cڽփiD/M&R4>J7Wy#.71? 8{˭px&B;liYRLf</TuV<٩L˗_f3%,$Yϊ$R2auvT+ne1}n\g9i nUH.r?&~q7i21[#w]d !jKJp`0u73wՎyrٳ0~ O3X0IPM.:ѫ^7y5XPݮWK @ɪ|<xKY3Q(9iXpOa~%PRJ3*p!G1 Ą  a2#8br-},."Bk;Hez=gQֆVc+,`~_HWc!\SZ4;z. öQVz狈z\^t1`m ,]j4) ySHiPm!KWLZac"M:87u\LFU pT Yu+S!e7@1:RI,FN DLqR5 !-h̔/d 5Z/% TA&wKrH4AL(#R a674Ir%KL Bh AS@5[l.Jq–0fO@R EZZ7J@a4Q\hi$Ji ..$ @*lj_z`g X b' &U"\IAJ)a)˙&!c,E* 'q$H kdV`*'qz (qRQ5:NBynu@Rn< MJy1v t F(4uܭ `XXJF5:JI JQa9K"9 vX@B~p-#S<!օ{MqP BDlE7c2uk@V"LA@)@ml$es&rYa`B-`a%噚ca] m;5_|؃ ٝIέɭ]y`&歸mjYJ@ӜªJSM͏C uVȋ% Bx-yl 1&Vbߊ.|oE:Y9jXi6AV@/ lxm{_΂fdTxP4Jڹg٢ㆍRA8Tl\4mohh,DXXi,<)F^]1'ѭA RN{bmF0<\egz13֊Zc 5%ҖnT>eD=@}.P ,D#ȄC LIB!$EJCh̹N 4F%" LZ[!Xsti(bÁDX #Npr tH+!!NQsZ?s`/:֍f39,Q$H4$q{ Ɖ(1 @QS(K8̤ 6qceR"Az8_2'ˍ~pىh<7^ۿOaFgx~[)%$BDu& sqDg%N0Xqz2UNU_f?ILܙ`PY-),prX{3MƟLtyֺb<:6#kvLG2M+dž\t~lh KypWgx)NPcXg E5*+RYʒﶼO;oyn`J6pـ4;8p8^8nX yLFJQYV2wّӞs[?f ~cGpꏉ3}g=xsbΫd/A/GTWt>]i;߼`4蹯Ն?HR-*&)HD`" LnX*؍Ţe>'I d9^s\bQ.:)0QT 9/h:ۂ U1~oH7qDL˦m_*mep^|*9aԯnN j\'kv]Nje9np[IoZQ>m[^!Ip借v1bOg9HirjO; 0eM]ѻZH8QD 1,(UhjV' fXhJ1)K vd- 3.$8̻ @]+Hďy-DR-6c41CUrGA'6L cD r IyQ@@52؟A1˕4Cwnt/Oe,R6tٳW;ْJb:=b+`X S;f3}d4g&j7~_=):|I 3Y -J}'et7nUbKw[8P%V HTBB$jdibܩ"Oe./Zf.Z=~3x@( 7yqbKߙd߻Ӳ | $mEw8! IM R%I7"-9mU,ԧ RjYPzl ]s]/vB7O#Uy]Bft߷O arMϣ28}MU0SYn2p4"@H-R9]+n,4Ryh@m@I јlCAv2pۭ%>X5wժ Ȼdy8ascrX㿝EoU6Ppd9=7u-˰qr+Vnh87D*e!&hQ7 X>2rsyJ%P0٦I-%Nn]?]%^Sz|b̵;A?{wiߕr AµCD쮵Vtv9e6L8˥lAf:d|Y]_O]NA֑ k0o}hA5Z|5gVCOz^tX POp?(Ɠfu)7O~ 'pm,wmmYznHbO vfcēKn-1DD)=ERb&VMa[T;N{I%os*R_qd@%Dq3XF Yk ' TءvͼOr'x0e*B%_ľiApS]㰓𬒁y+J0[M'ΐ AXblܐnHKyÖI\<ǺFdλkw2]H .( 5ޕ% CK (w4= ϒMԅЃaF¡>spp f^YVuvrū;s97[cw9AHkBCZKbWj-P0@]ĴARw6[RB#7ɨ"ZbJF㵵`ErMECfP䟒,>n=\P-dK$1Ge/L+*@"GU[qX-nyjNĎ>L0lmtw MP`aPQ`vݢsT:R^oD6YpS؆9H(QDm ۀ$CBFl }o }(z xKLg}jt->y)1H44l|T x7T`0g*^05 L*.Ų8{ŠILtc9w73+xW7\N><,Rf멛ՠA{5h{ -Hɛ;y-mP"TJ: ׵;bݫId7]DaZY!w6=. 
-ctr\Y<3Vѱ#M,靖mS^<1LN3ޖ\'#WGDQYJSk1!5jFRPOoLOג_B ^ Gur yuST#Ԟ[ᄉ'߇kShyGj1%1*TAl\=AVFW?"BiũJRf~Z;*@Bc5]`v3~4SӚgNe'6ΏR}(W~0KV~ ib_ ]d) 9-e{^Eyʻ?} ]\?`ۜ= yYd()z-ao EC,L-!%r-P!q`JշY%D9@paVRIJì6SwgY nD[˼in/i/-g`,R<J)r"H9;ռPJF&HsHΙv x^ޕQن <'fP73۽|z_Bi7L> )\ؾA_S6Z40@%(NwcBlH .jh_uGTMJ!@eq@@2`-a.(N- JB< 3*i݄n0>:cp+ i$}VV}6GQQaі)s!C3(k]SoŎ3#1:mywwѭCmƓTpg7~dSq|%JJؿ 3 wė1lQct?t̆f33i bip۽O ?~g f3cxr\[ryy,S+q[nr(r1Ha1He#MnMLnup+g,uJ:[.);Fv Д 'խ[yoOOHu+[9<%|xn],Rb9X>\5f0ךn[YtHRQs#JW dyiW')r͸^eP9>` ?IY%C!x9  "a ΌbwXBK4Ke\v*PRRb)g""cKO _V$ӈqzI%QpZ1Woٻ-24a4ȕw.eĻPc&Ϭ[q0H4R- p3=NG*$; JRR0 !Z>R,(Ap)KKtaќ,r~J!Gr}5B+r-cq͹SZ@/SưU+ec%}3ACԜ Mɦ;ʙ%S^z9%EqV҆HiG-fI{:1R}uW*.ҜtcIho1k ]_tU( -ct*IVŖ1nӂhM#װ$ vuPiGƞOvvEZ ±58(+EC:#ԬieI A*EJou9Xr/w-e`Dh4? 5$5'Qݽ)RTS""&Z>@դsE#ꃛ;*;{Cj VΏ!a* *оR3P!QPKItiN7UqU?%ԙn S.KWt%Sg6sVť|)I+2׭[nxk=:y(bXp4 ;>KZw] qt IW_Y%kĢԛ-Z`Y9EּaJOhkD):‹%Z~$L%!ℛ(x[&Ihg9]N! T ּdJ+qWG ?k3Q; } t1:sN8` KL>/O,ߋpmnEuw-AGLe/كĄz gODl**rn?FXK8g}{eZYxL.ߞ?=}y< 2жrV;>߼o,Joa=?gSڏ}ԙ|iC4a7EoΔ3\aV`.ƪ:" *h9!0m$|´8kobʗ.  çaa#̰!G Lr. 7P*xF ǼYAa@Y0wSڀSu9su>¬%%toR%m4FtMa4MeӴ, DH&*^}RB#챔q< BY/^F.Y@% rߟ?؇CF0014,iAk tH+-H!aIP%"V8p 8 <'kәiә/֔B68I2 pxʅFnjsSs~λ _;Uډ\վJ<21s${m)gHYĜUH'Gw ߢB5+Me? ugCWpq/o)12SQJN{^a'=@v*m {?fs^| ?{>9Jg'x2c&~o 31{u99(e.`LJ bܽiʄW@k ehZۢ]rY֎z,ȹ oguv^2)_/x.Ђ?{WƑ /^E!yX;}RXHY ꮾ BD2++*քHIz(1Ҫ5ȗ_Cnct=< 1nP,RlLD{_u}d4WI(g,}jS\9r +Ih秿%U~άHX.MB;G- Q +Ih瓿%U~JIh%U~PH}VD2z #$) H D 49\("x֭Өˉ E-eUXhںu>]PTsLۣh;k[0 E%oQVhںu?Pxu3'gU{R)h&v<ΫsX'_ZuvQ4Sc|jO'U 8yhdeqʳuB(ĿmD|5?,9!n5AWo6=* l0#}@)t{lL:nxwXfB]fR=61ӧ&Z1/aւ(~.G`lrge<7h9)|uZ&l/fV%#?D>((ZF('}hT0S@zLMIy3R"s8!ʻa[aǢcCB9IY 8mQT vB NE_/ia^"MHBا9is 8cmw,d:=ş01+KigͰd.&`t?:#2Xd5MU<0 X'A$5a&IC`XP0\X Ui2i>w+9c`RdI&p!`oLWd)ק%>Ϯȕ 0%Pd{e[9%垔E=;RkW΢.n"^ءtsu\(#ݺs74ո5f)W΢.<% tAb`:cn2ٚts,nup+gQ'ʺԞ>nAb`:cn]ޑPy#iZǂ[J:8䕳Oyn1DrGSN`s Yg=Y.Şc'd]GħdȤ~ۯ ?TN  su.vB;cNӋpj b'\ZwB ;Ν Pջ.A. ;材Kr6v\섋PNU_ֺ;b'Իg'H)7p.vNH+|*VQD;*iCff {c>[gYuFkOscMIWƠqSJ%Ԡ3SբER&+c_)r怬NWU/ E`ͺ\!Z3 LerA1a^ťg2^+@CrjLco6ْYMᕜSsdWz$B2@ ,  ~Dec>Dx}=D>H!:.-cYgʡlѺK7*y3b9ame3?xëGoy5Wۡ _\p{XyL~^ }c Zg>aXyuIaJ1F= ;Ł`=V Bh%TRqMuGoDir }1xeg4kZZa IDYd/?_-oWA j_5w,c5p Ƣuf':KQmG3hc؃Gّ~i3D٤Laư}΁oE\雠 R""UѨ:ؖ;>YXr&C ILEØPyP(I u ∡h/aHTt}.hoƯd0P%1PBZ͍" apc[buؑ@r)Wp@(U+S*~̼(Q(&$4˝=\c%&^Ѿ0LVcq)X?>[6ncA;j: hbں' O3 '|^yJ 9d&{ƠyAx,=cz*ḂA$ݩAKJ\gt!Đ9"'Z$HE¿xFpᔝ>G)g/E]#٨P1\s3}rr^?  cHL91c%H3?i=A#nhINjOQ`*~ <yn [7qփp^1|qZc ϟߒۛχ"IBWIo /(!JeGg闷C\z7 6}{0cJcLj8z[iX_*i;i T, :7-~ЗYX^@0%IdHSKuOu 'a C&L&Ƙé5@~|s␍|gM1JݬnW4=pjִ(m8GȓA0< ȶ;]^LiUG}0F$3%Q >('l2@ Fع]l,1I1 }_.?3E D#MjZuqQK|řߞUj8@SBקYiNi7%]cNX9>O{CQ) "Ya_=VvZ8cȬx:iN['McMGlu͡kˇN)g3(_x'l׉l jn̋FTb='Arlp6~kmJ32oB  7C=\@3g]0E}Wk/-⌠%T򌠪=T W՝TR&">q (~hVgmё+SkS l䱥.Fc9΍4I?h'Gjf\7pBWCi0~)H+ҋGؾt5 uG]cy*7-%A>Hq'{+ /\d9 g/Y{HTYHۀ~! 
R"[p-Rh740x~ӏ= ᯓWof.B V/uT̟K|FVbJAbO@,&z{o~#;ФZڕr^{ H0{%lBAvخuv8EWtzvOC#Si'&H۸m+>{iZ q%&LSCD+H)5ed3[5N'H~.-V% 54!/*U24Kg概mNj;n*QS7'.8P֬rݚ`Q!D5tq*UI4NB4dFܛ%H+$U RAPҟ4ncW!cׇ?͈DPX÷BA2կ˟<{ҧ=>VTд _(ov^7 Eoɨ/B<+Eچ,꯺k!)śә\ݕ[DCmducqAaagۯwoG맖xZ\t *>=/js̡UlfM۩@t58[ńun*8`vMlkS[EDkdvg pĐ,;(oߖ}+9W1 A@.,[#4HDW]'&@Or[k(~U@L#-@9B;D^IX1Jlfm&E\z7E(Kvq5G y7o߶,6$+OxuL; K45Wjn e6>2\|5{.LEk>+Xgc;Zl`:+XP 4Â*mӓry#J]DwfDwXIdG<I[bF)f7ftH VA)]}0 kMh]dS_3,;U;4+uEY@c/z~@ݗ2G(g+8̐ٔUD&il•K6||J\Me ^V5ӗk3\bB4[:ئh~=ũ\xCN-ѷ)nާy|Qؔ_3)ʴj7V+%하Ƣ%_L.kk)Z B5:]Jb[BCp&Aڒs^4]RK0߇鏷k?M @Um*zB`Z= +i UK5u|n*1^k(u:A 1ZR'9'x2|{ DGCS[3;CӜ'> B'Gi9Ohb$76l-HjU~Z 1QJ|NXG7$Ϥ$U .jLΠMĎ'.<MAC(JSEG>6!r}d,l%5Sk2^F3) 3Hxq_{t6Jp+]Za6㯦1Gӏ4ӏ _{𝗭SY^*GRK"%/*%Rt yw(VJ*GBH:T`"h҉Zb_kin'NE=נC @h4ƀrs$c| -K/x@|0`Є"8R+$A#"-hb°-xt6 sPavܒƝݓ<`szeGƳn ~yFנ& qZ,sܝy`'U:g}?yA߅~0"ENsc+u1-d.GSw;zD5x=O߀Rs'_/ojJ ?;ylzqfA_MK#5 Myh ;BY06 irU5:&IF@V}\ Z,=O e`!)K[z3,\TVBwCagXKjt@s*f 7 ѴIf<{&gErj.ԍ[t>_L DttU"uS+{;O% s2CEo .᜶ER1tRH;_jAk|6!"~@1WK\t[Zl]q";@l^{=z'.W\޼ e9WO4QpҞ۱SwkS%nҵV}û]+}+0.BVKQ`WE rkxBmbBX0,^pFZ0<7 Q5'[*v]XApk.ºY%%DLI2 $c"GPm&Z) + A"'cЕ9#5+$ C}eqf{Jɐ+e'L8h0&rɆV0QM JR_=C%l(uI0K\>J wWJdQ'ZH *ʉsTN>pfXAM>rS/f\8,~E>~cŨfbOqN~@ ֢ŽSa$FFi"xZ 7FnrCͼ@䘨ߺ!jgL+ifƺꎆHp-hސGw4U\:+fE-ɻlָ=ԋ"03%١+]Ň&JJl$U[8Ղ 5U"e{&2Ϊ-GGbUNA7RX0mjr+bt4Y c_h/xAV bw,9m/ܧ!P;;dezBSɏ<~dzf*)|)Ѷ'b͛&-:UQI0 W2ޞ!Lc#f8؂-fn Ej2§Oh$6:Gp`AF`* ,a@a:ʃ1;/ s?lАG?|2k ayZ x):J8UURn>wbPZBTLIr&j*v"pQ9vH6}0ekkz1-%].@ʤZm uCdKcW̊Ū2R tdG߁=*/@@j%H *m2k SBK{Y :9/-xS"Ii&;2{•.wǢ&YY3~3{ί@5 r; iգ~`ҝ %#[u<%^|uLkL~+BY`_uHUqMנ\jʛxKr)yuu.o>l=W/;%އ],p`&fEpJGoDGPR8s^q6J![Y&9Q9 pAOkʶ;:Ky!S?_?x`"X3^؇OF"p(;,{? >[H y?GTPP6~uenOQG (0܊Y 3&LJ>2`CPĄGdB+*-!ԧ_I_֌l[7&~^讘؆1rכ3 *+2ƪG&7(YX됚 hJIۊ}6|r ˞bpe_s 4 A$dyF!fH YH,^pD(ޛr Y[sNJ)B3q-IHc)ΟĈgM2o{gF`NjkN0gFu:ds=v? `O}59~ A~]ڃ.DJcs;Sݭb(~q5Lթ͠46zk pǹ{w!a$:E]o7W}9,vG曬;vφ/9|dɫ%bhh{ӓ1{,X*<\@Șڑ/ȑ/`eDS9bQr pNi-Ԟu'O*v\McsE%H;T9Ml)SIIN IO0'#e ιbV)52}@_~FI)KUF *VQm6J;Y"rH9@  ~44Jqr.KR?qynnO+vơt)E£keاI'40C¨pX5wzᅒRS%Lkꌰ%I d+8 \m+=S(N FN;F5`v1Fkĭf^ibu|@߻c͖M6%:,&gKZ0Tޣ) C[ x٧˿RbΔ|mGTsO =?\A:IdA~=Aza^Fl M;K}/5(YZ:"huAZh ɬ,'Os\xz2Bc(PvlMCb6Ik9%BB4N+ЌbnE"!JRm E֞R}= ggfhFdC>T2@v8%)My^j67nWn,g^Ǜkʄ7e/gxq֒lfݑm(-ſUSi|{[x=իbs}WMtl*(2.O9і ku0#"/7 qF+V9lHT!{'$QO٠9U!0Jv( &6G5O1y6p/ 3b]qMo^C".E F.ďixD?nd&N2IJm 3h"`u0-b 11Ĥů4ÇW~\un75~& 8[:B+mv's[;֞{f1TJvT0A.}g@ ) [Cm SS>RrXr].b\3%3E4J/gBRt:ə }23=»祚KmF>ڎG޸RIT[&1v#taLNb'3F3%Hϕ}BQRT]n ラr # Ď7mu{Mr 2\!bE?jTm^)cA(eTнTwD}̂[z_a99N(%;ЬfⅦ$hz|n{_@~!lwc~3"/sKvT\.?jΖ_:lo*`~\+$f0l'rg9zB6e6Ļ۳ۻPܐuwv?DgJa-i?=v5*h9zmdbYW9-'+ !iqkyi;w;4BI4Ż10(+juGG]d-gӦ[ VvldPK@Q=Z7m^=ӦEdNh1bN /FvD=N|Y3&g~~khL߅2g9j*ISцyL<$ 0쐥ku燫Z%Aɚ?,Mc6}d#/4z8mx)~Zf}GCw?Lϯ?~B]xIt4زN=,፩UK8oBT #.='&U }F]ͷ|_6鼾][{;s떴MR'bjq4@ݸ.IQ/t*1~>E D 3d!بd7E}Hm L;I+4׃.@OOK_Dϕ/?e |-6Ysw7nU0bkoR(Q}P:ƒuc%'wvh~{=Aȑy~ HocO L!3AfjPСf OWFd9J[ ">K\4"dIkex1k J4Dz-jAdlEv|E*U,ZWuѪw QjSJFQ8b w*X^[:PQC\}A4'K;ꁷ糴]nTN=7 석<f[jSYt-իY0ʸ}{m?7ՃgNWvկ͵{UѭZM7?~jҢ-TJ$ɐ{0,"̍9BDܳi$ֳ D ! 3A&LATɨ\nŔPB*g#H4PB'9棒cJ/n%HcU7ǟpmiU64X)zE5aDk@MP Nf\ ?ĆB jgSaj̀=EԡcDMa΢w*YPNM[ahJPNJŁO zhGOF\Qꀅ9ěpnM)RsC $BYө,f"fGLq}?,Ѣ{X|~ 9-%h rAh-^Ko~Wk1p3uamSldbӉ ؔž{dj(SRZETJzl%5sko @k~t?j(b#Jbb|4%y5$(5mVF.=|t6=<>$! 
ovۄT򄝑çm cAଧBN_^PRZߤ~ H$_׆rhхRq#KCs|}+{xD/ޔ^_{"']&rV*ҪqQ vmd?)}NJ+mE]2ݐioc)5t椪4ԋ >–]hIJ l{Vuwa(6>{}H Ryv+/2Al%1 WtmnckJ#h8֧˧~vᶧK2z:=QX;lƲ>wP.(9)s{4 tû 1OTiT-Ќ#]h\{/97sHLi[=:1~0XB1A!)cAA1v$\F5=?:P5ۜ57AsG oaҿɀ!A!!Q`\vm ^])C6%Cd H!h km#.5ŝ5~ZIݦV,fh˅+MUA Dgt֩戙hVSn|-'V]o;9-nZSljWg4x?TPZsu~wq}w'nj&38!IBpww^[EBHyh !yRP6с5hoX!&B;;VkaRP0r>^ܡ^q#Ya^{vib`g3d6/ f{FHr.O5%[DZM5EQc%[(vWUͺԔmA(*Lբr "U7COF9 Y̰{SwbËip`;v7]XcjA(紡}p(:jCqn^l1KJղ[brG)QRMװb|??#Z]~\wY-Ds?vg& ˜ ATaoUaKd>/wʁIm?&X?LrrVÍ3 Đ3 T)yjC2ҭzJLu8cMvENE=[g'iTG_@?y5O| +_齹-[a$Z:Eg$S[M۟XAn(KTw_zȢ9eelj4粏UMDSD(?dnRډR)Z@D$v$0^[n:Ӈ[WE^ኻ>E. ^)2YID |wqU^s,C>KrIBH "Y/l @jgzQ&W@'e_)O]#z<@׌u/gWi~tp_A1d(Z>|&؊T!r`109EAh`D ++ ` ?_~^V: iWhTJk .fcf͝ϛ? Rw*u;!pR:_@`)a]nP +0(o&B^C~r}^-uѲ Cl#;_n3Ky\$/naO-1ʰ"9ʪ(c*#2L#DP",Br@ Mp>nqSu>PL: X IwqTS.sDyYB ^pnܢ"suQ@I(u es*MT!fߑ+*s;"s8Lݧ1tSZ!4~LYWjTW4b oL. Sy8^ri&xi>~ Z-X!>☬<3+ V-k. Iaxü4ηo_U=Rx݀_r!O#ZA=N <6g69Cp\eNYpGǎoy\`HgFhm0$qa1<'"wPL j,P$-`6q9ChiULB! U9 rHԫwyÖ_^=*˻+|^ ~$`o*,L */&p ""C$ ?||L?]O7BWέͿYlj@`mL4QD{Wݽ&]()؛p)pe`'R$Abt#6$]r*q)XZlq)C!H6{D!OʚALvVs׊<%a[Ԓl/<ܺ@g#1pa29gnEo@E8Ocn]X-8 e^H$ 6H(7vG 4XJEzUkZ-m]%W]$Ait~%9Q< +@F<=,ӰY)W"7J v`͝Ŝ4!_0 ;S}a)-bq_dNM5S}E|MIR1:F$GDIk\# w!!\D)Q+s]Wm&:c1bQ % 85i}eTqλ zXo :WET1SdW"cLY3MK#4~1?sdn1|p75\W89w=&u1q^RILZ9? I\Wb_TlL:$Ζ=KjE Q <] t"go8g2+n :uMNnBFFb|瑎k>bB҆g\Y1$LIi2Gut똵r+`en3 Z="@9VX(c / Ո[rԀ*^.ZؖfE%T1qրpT yE-3![#C.tD7/$)]|'!fݬ Z. `Uf4?~:݃& ><۸r!!_\[$=%텠ǩ=<'o ABD^ _)ggnCY16֯(AN_b g!Emȕz$x7ZI<*/Ak*J*}9a02[as/)7[ZKbw"\1XX  ~AB!8#31K1whl1yBi%$%D`FQF%`[A&W en2 ~NP>t.E&(0ms sLk ;:͜(v$CgPBT,CJ*{DV" A֨11˺` ڙ*7I q:P%PQNW{(ǔ1Ղ<8Dh_;WfewvA+lGvϧ?7?"R.]3(xכ PV JB_*p\E8tbژT~L?-ӏ.nJ?*G})yFv6ܝmt@U2(~Xs/Թսn=]ε(% <2heͥ3@cRV)0>;/nugn `7̾8tgnpkJ;@]0Q%;멖c ZD-.mcJ폹Az[VH Atx qvP[T]aBxj>kY^\]uq>kAFWeʧ:wpݍdV>yw]q< 1\9 >VK'&#lם{[u~QV5&6-ĠP[H &!4ނ;KYSruc_Hv qyc &YJr\0ɅgJs\KP|,҅%$Ԁjz&1-heUZqlM8vHqZ۫uig W*p&it ^"̜! Ǵ:7 UD)@z[SM\/Ud.'Q]Qaq,IJJ ҙzυT#uA16́nb09s)6`SdJuP߭@.l@ D& BTX'V$|TS% XvvINU j{@\ ;ʚ1JPHB͕߸ ɚ)%A(@z+7U"J ] -mޠZQy}OM #kS_ Gd .$䕋hLJV-93oo)8ƺ=94IPasHQ{¹&&gD*k9D U}\ȾK@x")t]o+m p^9-ܯRäJ22QHecDj>aFevtfS ʌT$ݵy8T*HpCr@ݝ1AolXڟZ);ËM)pۺҡJ4nuSba@r)҈f09[N)ሳ%e GY8.ƚf`xXyJŘyF Js}ju! n>m#ܰw;o3)$x=,׈f+xM\<͘wb(E29e|Pxf\(n4q2 ˆ2;́t &s04RS=UO c >ʖ޹ H&tdnSS.ߙ7pG`۟gz}{ycSC/p. , X L^{n$*_fXPMPKZ4:OH Z9_; & q:AM2n|xLUBVXިa,ϣoI@ڂ,Ŗɐ\_! J!-SBvl,w0y' k7AnTemy;?dTQBX PnA.GKsaDVVHMs% |>WϫuHl)VBt5Z1o'e*36[E|0TeD5wV|Z:[LէT˳Uw@E".BCL͑0HYi )-'F#b̧l/Mȷ,I:}߯[MZ=6jTXŗzd( `A]keb4LP%%K)ƫRWTZJc Rz#ȼcZWH$UUWԢ ^Vi.!ևpFyԞY+ 5/+YւȁbJ('+ X ˜RX[nP:TJQ'+o|]1iLk5I,y0W.l,Fxǒ'c[ $jۋ8X ؾR~,%rxE]<Ҩ5h* hYeU9K&d f9u>)n4\@Mz|tuXN; xb' B6G"VBј \{F yPip_%o. 0˨ ({G92"&hW4ЋwXR6=zh6+,Zlj+m{Qi-zYKLJ%ׇIA CF`Vl-dBMϋx,WvX]b>X)YQY-Uz#T'Sf\xFrZxܔeɃ|I:ˤQŢj2B},r1H1wtn[|:E>whwB޸fT(d[{ލw trŻo)N'z!,䍛hMqfO{dN;xspٸ03whwB޸)ke1H͌jw,3^LuqO묍 )Mf7y69!US') 5]H}D\lJ 侗0?\gÔDž$[ p9%nEiHٵN}IȷU-9z'ZzKay"a ᵣq'EQ `,~πZDZ8nCBcn`XsXdo>J=7B4k! ˑ~) k,q+loV30o9S{Xrę6 C-q)L庱:JH`;71ܔpvIm?? #yRJPT5\< ^׭}yapޞ}X~{XSDV>~liN~\)bd(Zc@&U1ta`0$D(ҔJVk =- u^*a}} Or%MazXoWDg5|bI+H񹅤x>]7Y !.EV\e s*eXXK޺m ᅡxW\/( uU^׻*d(G@( 2UkJK@QNqY["Ѓ̪*4xa(ARCZz&uH$?rPL]$jBib[r@*V*񂉚%!¯ilm}&jҜ%=+jP^vFi,fbT2ꭞjܐZou y&ۦ$0P'7'^Su6en?–vhn>9CFh=U'Zc3DZ5ёXl0׃O?ۦI"m3*nibj"^n /t0/m#J{ ۓabZ3PKPKwFaFCs*I20lI29+u_zKhb@$@Ӟx}3Sc݅^^Q?  
qvQaiKa ]C,|h,|ɛ2o:qP\=:n F­[~_mf{tW cjU rE Qn.uh5VsRn;;zxYqx´ќ4s$xDžG &U)jELXxwwE9G^2DiMK; V;; 򛜽dExSu'5}^D3twEQd*()Aq\JLj|[hd#|Ay6th j\߿VUL#.ALAZm Q[BØj/@\k[Ax'VUB*#]2>NSSNNw*>he@kx(*} uz.ѧV&tR0*~[m%eVn㗯3!g~X'ϭ^2ԔڻT{%;._QVǡݎn/ޡl4Rw=tX( &XQq9xRLrN9<܊7 w"ιüG<e>ߦjqi3QĥJש\>ݕtjM9(UsP{ØU V%8%Z"P`٫ 8R.(b!NZkV B^q#y (iI !H#f'H5*A(=F e;GTrS[(6_W}|5?Jq3)$[Rw4|ph5v xVǖF/iJkdCl4 IB6b)SNP#W1J%'M:ȋ h*T jY6JM?a Qe@`pHz ;Uy9?rR_tSuEӲ2*Z@s`@)j1 P)Wgѳ .ڳ=Ko0zZNŲz,Rw[-@h˫/4U+zI>9mJ)GF,o˦PɱYHӦ_ˁyl|{&9u6XW_ woZQͦ}U䗃6؏AٌC!+__Z)܁w;JByM#G&T-%dцs+bNW͘ =aFˑڧ,F|)cGgHw!Լ^`|E)hjo* C%]`XOӍYHbMJHhT{Tݯu)$cΐ ]KX9"-gkL ՌMd7+m7+CYczi.1\i>`O7}zr1` մ78-)Rw vQx*zH 1ױ('4)G2gQba8f ;qwٲ!# (H[Zmb6^y08c\W.8낯e]I#1X-j)| ʰLeϛ3=/  E3=4J6m'KmrSRLf vQ)̥qys2A>hF>D.~s.noqQ0rwp)4X7f 9WJ;)6j#W8ƨӆ;G%Ceo-ﳮmFOS$M]k3znrS7qͲ)f?{{7٠d/ C!u($DB~h|Q)2ZyFjc&3ʀ7T3"w"cD m )iq[Ԧ>`N16B_T<7~>6O~F+!7KohF,?SCEJfWVJAE Y^98oSфpMj-ƨET bL'uAcnMX 7$:Xw˳3zT bL'udSQyѶwK/܈nMX 7$mf*)svPL =]3juP#Uu/I; panU[ptWhh}-_9b}}@u_~5!2-7\|,V hԮq`k8VӲjed jz܉ zW ^v_5#e9qqLf矷Q頔g^t*&~pxuw0%ejDF+/o{ 8<e--|Br4/}U ȬTxrxPzuuCRqFʆ^r Pg )5Y!Aح;<?}{tf.PM.nFlJ7A^]-1ċl4!q"ڐ敛3ꔰeG{\38bѽł9g5w$ʂk(rea,@r:E!$&Zd<ф/ŝjU:Չwkۻp>xWhbUgKqg!_u˲|>NЗ1,gk?H6ߗcdRDBeI/*~c^׳8gZNs }6>= '7AijB2G.(+gQ1 u!4l'wїe.N<28 >D$0% yxO_x:!Ua!/!"eG<ˉ#T<uѢF2 sHG]7A,~N>+^ r[ f&O!q$ WCヿoӏ]`7> SO3 M;wvy;sd~x .p2,{ꯔI5b mTVpܾyH5jit9r쀑d )( :GBeZ $F[$%ܮ' ;mh+҅1(~x@72u`];,|@i3",grԨnH/W6J+ڂUjD9^yz*%.-Σqp$-%u+ӵfd-];?D)g3U+EGj7=W QzySjF<mkux} bH C k$Qg~ IY m uׇ0Ҟ KeOvJC&se7NOsNhiR,HHɿL%J66YU|TUI)偪4ʆVdrH22MVp{jyl!4cR)R,!3Z ֢97]7 lxlU2jôn-փ*)\'5A4邮lƣ"q6A#P~ 1!MO時f]diW@F,0&oRp1<*'\*N' ]-NQxKe_npޖ=4PLIB[Pg?H"H&FDS)ܭsY-Wvsx80^{)*m:+;9>BE{wq-ss҂o[x|^qcWD]fC6!ӝtt[ hT1Osne޺sEI;v Rኰp3(m@䂣vCzFF/HfMbBYCxm5[E5_鮰5p.qJ颩mXb oCv~/UJo%yt)? \ WzRip Ow: k3hKepC%CsCMI3njks"*)ó$`Pl9!ѢP :k'CM0ٰY$ӎlj˨:XͿ' lrƀ@(ox}@+isCk.'~1↠~)fdޗw?XS2#4RQL|e #sz!b7|BbrEWҐBeS[+Y%< W.GP/|9*r$PNheyM(:0S>k3 T^`O&W7#g u8o4m22:6l}0x[^d!,Gq8x2"+EN1/o͠  E6'}trˇ$~#ly:@ү*tgxUyUQyKS-eL ^0=ӎ)ï̚jHG%x 3Vc-GU*r&d4b258†Lhpey,J ]OmstZfQ!9xtMl[0&c»Pm^ zs~j)%wbNYR1l4XF?{WF C/3qÄ힘iG{z'ulRG@Ede9:ږV%L$_/16)V$H2?4 ڰ.F5/|tõzdԀZmPiΑ#N7"f%`:挡ZhȴmH IsVX<؃F77#zу3"z=8J8rpЖEDp+׃ba!qDĭ{sMb!skcQ={Ƣz# !\O$(xqrVbDli]9{ O8US&q*G$)͓gƏVlˣN(`"_4'MKql^J8ĽC~T,կъN ixDža0;vl5Z~zܡ͍P =5*?=a]v s{1=!''_,Va†D$VYH^T듐zDcD_BΡ~*p cp_3S !WYJA-"Yq@` A?'ZT]зi87 "Ƽ{2qZO'Ѻ3ugDΈ֝-Z'db"B),C&ۆ(OL%r $`zMa~3yo(jvn&[#e?~guK/o-\"p;uM=5s4X5R5XO:X x=N)L:M妌M?(Aq\b0p< \6G6y`ȂdE~)@I3d +Ar 3E 1ɄL*d*$r <)*D PVWU0C]q@5[WZJ#*qoN\i2,D`{w >k7>_R@}Q7w\W`QK7DHQ;{P*:cT-V4x>zp[dtTׇ}c=?ZE0b*M(.y IX }҄_A9t!!_ɔ_h7҈- }vLH`-CvBBrm-S݊@}X}Jgvw]߇[aV1]/1|XQ'g C3Pf_yʍ:8khTw߻s׎iO4v ܺ;Xxݏ:u FhaG/ .7z{3߹W<c '&x !Hu %}%6%4$`'GkbĖړ(/ wD  m =oCN]]]p=8* `RX}],Phjba!M; v'7{Tc6P( vP7p\$BU^3м8;7so\'&ͳ!O|پujZf @[AOٞ( @ % 7EO(bv3jcar^`5C_ 9^L&kU;ƌ5& ϊ1!1V71VL(zij^6ݴj9=|ҟMpSggnQ~gÃo`OD=Ԋݜ[\Us~ǚJFY.%} cqtGA(2|+cu$B ނ4v$c㐦.f8昚PI!1MSD R}hAj7jEZ 3&LqǡnDX %o5 8s1 {_r~Cčwt  A BB'^E} &Ⲷ$/1T\RX/l( RsڳKuwi؝4ACtuqǹx2 T)%dBo(TDFBB`&8D4W Gѓy?,tXeJiFO,Ug2&,A9MxrQb%%a%@sQ9G ᢩ@PU!`0J7L >a%&y\ bk&ɗdVFlrM 2pNjh%Z4G(b4BmxbI!lgvr9]!nr=lvËnT-&plݴ@ ~hc3!`lh:2Aohz 4Jlfok&(ư P/Z1 Rh=(Tۛ6h:Px Vv2qw*uF-ϡ` F.HN0_6ktܜ(Kvbo :ґLilHÕH|e}I\2qMRsL!qM~LrֶD2DfVg|'s7qxcŃIڬZ#!Vp$^K>ytQ+QoX#gfYOg×d *j9jLLYM<_d|Z}sJ%T_4f7cٱxՇ!|Pmh=>^!B5v?m5vm54 p' )Xj@hXخհ񮭫wuq< %9AN0Ί&ưuVZEfYY}~+O*JH7)˵6g[֞m9(ֹƃ3 UZ >b(bRA`қu.I!)@FP jk{<$.Fj"u" F (0rQq#dEQ(WiP)S X0eK縀 kpI%AH1@Up%PR%c$`EX6+`8q6I4V'xk[z=M[`iy3 r xd?n\%Gp}jvKAU~ WE 4ݐ%FQ| C$b,VJDɸӶ!:LێDLd%T$}ONI![B;L b.cO 5^5acI1#ŧ{a ڽכդX.'۲sbB j,ӑovUy>yX$!?)XTq/*eoq}GEЍyxPWg"ZRbx-oG]ntXޙDot+y8ߞeЫ?Vp?q%=(O޵q,Be7{_! 
xmqKLHKRN }g(ix뒈[5]UuuUwWfSeF>\zTu{W4Â.x7_ \Lw"ޭC*y7unUXșM)ZֻCjY7{e%yo k)T)ov3JU_ )ӈNӲ c-Lͪ oԣvcRiAuUrPBL8_۟gR&_ |8S4UVKigIƐt.cŁF}ٿ1F%t`|ʺ0ӎ`b2 '|gg?%J sU65S3}Aeջ V®Jp.nK%-rtAؠ.JȾ+~}|@TBSQB.aiK U:'hPAE% k^+=ZyQJpM4Mnot'NJn0$F5i2d+ XP q<|* |!YENzd{ ed094~~3(4u*KdI"Gfz_"/!)u@Kԍ>՛@lq=.WP7CT%8ota)K(R jG %%$LU>&ٔh9, zFoH F%rRwjfK: &ObABBI뜻7yj6YԊ˩U>]Q}g0(a;X+I%\..(SR%RT߮J?"YJꏶ$9LE'\,{b|E ʷV[%8GdBF(i@`Ĺۄ.MoָlprP2 '5V)T4^jKPf6>(ER8/.+&Ҝ"J9`/ Y .5l|ЩJ=,8{H(ja (HSl"h0;+sX疜${db}|σcH%jԷ]}miZS`V T{CJrֺlADF'!b("M*V@ *bxU&ᒳ(fRssJEm5Ө 4pQ&=rnh2;*8dTa!gn^6YQB$@E[,5Gޭ 9s-mSrˉ. dz4XqBxA![?GݲoCt-=7n9)ڤᲿm2J|؃6g5})s؃5}iZc]e_ k5pVE]/ ;X+\g] B.+`q.Jy.y?y+M6͐x8?1g֬~>H>A i/[((s*Foc,o\cQ_-&~L<`#J5/9AiM0K{rh%4{\(KrM*P.Q%uzKdpZGd)e4!zj/Y6Fhw?hXWѶLI)E7onCBgUw\PCjNjhZr9Jv 1)88@@G:zd8 XFqcE.>^v|.J BeN8Qeqf=QEŝy=UjR=noC(8ҟ^sFm/Bh =LwJ]v+=QE7jjMK1B}@Xl8L`Fݠ: vU' .xfUT]=pX5A?zQE6vK:ŏyBt;LN,ݝŽ66˹NQ](:Lq0GQm_fˑ>}˟_5&S^^Qĉ@ giLE3 59eu 3_WɎHϩטN`vK*%ŕS'\6l@i,y|E[q]j*5%u\:xelڒM!7 f`"b,Ii;9|1;O_ RCZqd >?X[{c}^? ZFV JȔy 4 S!,Lj+cZ4J88o!}3[,x,> Yc߁nVq9` l}L:1&78rd[C݈9ϔ7ce22( A,&d$4ĉ"i8Úkc"0v6c߁w!l'ޮv=ϯN.IHLoSř,`J VfDS 5FcXBy^ %Sk+(;L'YtLXPa1A"!BfT"GT)K 6 ]z=[7>%b}ng[^)HkpZܕK#'wf@!cW;Z d1$"VXC = CD"$ZdLۥWZRp#"U1dm%e8 0D\IBഔڹQ51<u+AJFf&se0"\0~LBO ?[ldhҿo @nABt;"rrYf>}[Y~,MTN# TzAfp?}z-M"h;r#^G?!^ 2ʓE٫z5P{L ۽O(3@&I\}]a >o'(V`*53Ͽ=S¯" ݇}qC +ږGR-bI n W'y=!W!tuy)Y(b7侘ox }SX{=NMr{$| l09)@PC hnF:du]![x)Q qY%ci ).}.Ź!}R\&5V9eW~1 9Ŵ3UNoWQwTU앷L犽v]a׊L* h6hsu*%тu*鰞:Bӂ)`1XsꪸRXvxlg-[d:B5)ښBJRJːnl)IJʐJnA Fƒi%晵CHվ3^ Бdw{p͇){h~d oʭmf V܉NÍ@6S1xU 0^8w?u-p!{w,/5 1x:n[j- Ogreqwxֵ%"̿nbeR VYW!2I ˂7 oZ~xyV[~wLrB7Y߂xꌇ:uϒvd{-_ۯ_ s5;piY1YOfJv9dV9Uf_YșM)cۻqs_-z7_ \Lw"ޭp#n[r&aS\q\υ!c9W!&iun;]p\9b*QU΅? VSl擇SmBOzGqqs;`|JFk(FlQ]a"=\%VtfʜnW>:( {|s ۚc)ژ,"'dhcT%@/>X{Wɑ 1/ ,P<" 0OYð$"aPymqHl=wfus.=zŪ"#[ 5Հ'2u#9&Q mj~H@ ){LˀߟƣP|kRXW.ʻ븱,ʛUqw?_SA7tT?TAǿw_H%Ϗ׋|$/{[ҿS40xYaMor2hwʣm,~Fc'F#zz-Wt2?G2/4!eaPJ 䅋 eVb7hVƐ#6\dHvmy̻C9>b٧p MJ ~qӒxH9ӂ܄q;#j{ 0bε@(Jm9݈1(ĮM51 )׳~O wI㹪wAΪiR!Aca‡bO9Ydɟ%[ ~~[p4`#%!(qs̓ }VbZ`aԓ a %il*9-c]ZFc2?W~3go:ys׹_NzRUz[SgK&2/** b&rq#|Y4)%0Bw~: DVAAjLhm<O 9-ed#%rsJ8%PW CF4BiIZE_Hxܗ_~/VdjRA[gu0-O / WEB [Jde h? :@|Jh8 Rq Wӏq2X}MN^ldU5t&=1.RJ}фz%Kͼ6.5&U Q .#eZA't-n).g@% 8 rm"@ $5=J=чMqzO6K7S].._8'F)U_ZMPkSrY? 
var/home/core/zuul-output/logs/kubelet.log0000644000000000000000005355443115140165711017700 0ustar rootroot
Feb 02 16:50:07 crc systemd[1]: Starting Kubernetes Kubelet...
Feb 02 16:50:07 crc restorecon[4738]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 
16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc 
restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 
Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 
02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 
crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 
crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 
16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 16:50:07 crc 
restorecon[4738]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 
16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:07 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 
16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc 
restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 16:50:08 crc restorecon[4738]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Feb 02 16:50:08 crc kubenswrapper[4835]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 02 16:50:08 crc kubenswrapper[4835]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Feb 02 16:50:08 crc kubenswrapper[4835]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 02 16:50:08 crc kubenswrapper[4835]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Feb 02 16:50:08 crc kubenswrapper[4835]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Feb 02 16:50:08 crc kubenswrapper[4835]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.911980 4835 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914822 4835 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914840 4835 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914846 4835 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914850 4835 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914854 4835 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914861 4835 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914866 4835 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914869 4835 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914874 4835 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914879 4835 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914885 4835 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914889 4835 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914893 4835 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914898 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914903 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914907 4835 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914911 4835 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914914 4835 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914918 4835 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914921 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914924 4835 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914928 4835 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914932 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914935 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914939 4835 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914942 4835 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914945 4835 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914957 4835 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914960 4835 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914964 4835 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914967 4835 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914971 4835 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914974 4835 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914978 4835 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914982 4835 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914985 4835 feature_gate.go:330] unrecognized feature gate: 
InsightsRuntimeExtractor Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914989 4835 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914992 4835 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.914997 4835 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915002 4835 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915006 4835 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915010 4835 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915015 4835 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915019 4835 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915023 4835 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915027 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915031 4835 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915035 4835 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915038 4835 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915041 4835 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915045 4835 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915048 4835 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915051 4835 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915055 4835 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915058 4835 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915062 4835 feature_gate.go:330] unrecognized feature gate: Example Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915065 4835 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915069 4835 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915072 4835 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915075 4835 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915079 4835 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 
02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915082 4835 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915090 4835 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915098 4835 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915102 4835 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915105 4835 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915109 4835 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915112 4835 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915115 4835 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915119 4835 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.915122 4835 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915748 4835 flags.go:64] FLAG: --address="0.0.0.0" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915760 4835 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915775 4835 flags.go:64] FLAG: --anonymous-auth="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915780 4835 flags.go:64] FLAG: --application-metrics-count-limit="100" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915785 4835 flags.go:64] FLAG: --authentication-token-webhook="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915790 4835 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915797 4835 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915802 4835 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915807 4835 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.915811 4835 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918660 4835 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918669 4835 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918674 4835 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918678 4835 flags.go:64] FLAG: --cgroup-root="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918682 4835 flags.go:64] FLAG: --cgroups-per-qos="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918687 4835 flags.go:64] FLAG: --client-ca-file="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918691 4835 flags.go:64] FLAG: --cloud-config="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918695 4835 flags.go:64] FLAG: --cloud-provider="" Feb 02 
16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918698 4835 flags.go:64] FLAG: --cluster-dns="[]" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918706 4835 flags.go:64] FLAG: --cluster-domain="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918710 4835 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918715 4835 flags.go:64] FLAG: --config-dir="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918719 4835 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918724 4835 flags.go:64] FLAG: --container-log-max-files="5" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918729 4835 flags.go:64] FLAG: --container-log-max-size="10Mi" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918733 4835 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918738 4835 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918742 4835 flags.go:64] FLAG: --containerd-namespace="k8s.io" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918752 4835 flags.go:64] FLAG: --contention-profiling="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918756 4835 flags.go:64] FLAG: --cpu-cfs-quota="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918760 4835 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918765 4835 flags.go:64] FLAG: --cpu-manager-policy="none" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918769 4835 flags.go:64] FLAG: --cpu-manager-policy-options="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918774 4835 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918778 4835 flags.go:64] FLAG: --enable-controller-attach-detach="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918781 4835 flags.go:64] FLAG: --enable-debugging-handlers="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918786 4835 flags.go:64] FLAG: --enable-load-reader="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918790 4835 flags.go:64] FLAG: --enable-server="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918794 4835 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918802 4835 flags.go:64] FLAG: --event-burst="100" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918807 4835 flags.go:64] FLAG: --event-qps="50" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918810 4835 flags.go:64] FLAG: --event-storage-age-limit="default=0" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918815 4835 flags.go:64] FLAG: --event-storage-event-limit="default=0" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918819 4835 flags.go:64] FLAG: --eviction-hard="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918824 4835 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918828 4835 flags.go:64] FLAG: --eviction-minimum-reclaim="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918832 4835 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918836 4835 flags.go:64] FLAG: 
--eviction-soft="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918840 4835 flags.go:64] FLAG: --eviction-soft-grace-period="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918844 4835 flags.go:64] FLAG: --exit-on-lock-contention="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918848 4835 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918851 4835 flags.go:64] FLAG: --experimental-mounter-path="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918855 4835 flags.go:64] FLAG: --fail-cgroupv1="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918860 4835 flags.go:64] FLAG: --fail-swap-on="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918863 4835 flags.go:64] FLAG: --feature-gates="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918869 4835 flags.go:64] FLAG: --file-check-frequency="20s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918873 4835 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918877 4835 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918881 4835 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918886 4835 flags.go:64] FLAG: --healthz-port="10248" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918889 4835 flags.go:64] FLAG: --help="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918893 4835 flags.go:64] FLAG: --hostname-override="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918897 4835 flags.go:64] FLAG: --housekeeping-interval="10s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918902 4835 flags.go:64] FLAG: --http-check-frequency="20s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918911 4835 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918915 4835 flags.go:64] FLAG: --image-credential-provider-config="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918919 4835 flags.go:64] FLAG: --image-gc-high-threshold="85" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918923 4835 flags.go:64] FLAG: --image-gc-low-threshold="80" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918927 4835 flags.go:64] FLAG: --image-service-endpoint="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918931 4835 flags.go:64] FLAG: --kernel-memcg-notification="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918935 4835 flags.go:64] FLAG: --kube-api-burst="100" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918939 4835 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918943 4835 flags.go:64] FLAG: --kube-api-qps="50" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918948 4835 flags.go:64] FLAG: --kube-reserved="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918952 4835 flags.go:64] FLAG: --kube-reserved-cgroup="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918956 4835 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918960 4835 flags.go:64] FLAG: --kubelet-cgroups="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918964 4835 flags.go:64] FLAG: 
--local-storage-capacity-isolation="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918968 4835 flags.go:64] FLAG: --lock-file="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918972 4835 flags.go:64] FLAG: --log-cadvisor-usage="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918976 4835 flags.go:64] FLAG: --log-flush-frequency="5s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918980 4835 flags.go:64] FLAG: --log-json-info-buffer-size="0" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918986 4835 flags.go:64] FLAG: --log-json-split-stream="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918990 4835 flags.go:64] FLAG: --log-text-info-buffer-size="0" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918993 4835 flags.go:64] FLAG: --log-text-split-stream="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.918997 4835 flags.go:64] FLAG: --logging-format="text" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919001 4835 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919005 4835 flags.go:64] FLAG: --make-iptables-util-chains="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919009 4835 flags.go:64] FLAG: --manifest-url="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919013 4835 flags.go:64] FLAG: --manifest-url-header="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919018 4835 flags.go:64] FLAG: --max-housekeeping-interval="15s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919023 4835 flags.go:64] FLAG: --max-open-files="1000000" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919027 4835 flags.go:64] FLAG: --max-pods="110" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919031 4835 flags.go:64] FLAG: --maximum-dead-containers="-1" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919035 4835 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919040 4835 flags.go:64] FLAG: --memory-manager-policy="None" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919044 4835 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919048 4835 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919052 4835 flags.go:64] FLAG: --node-ip="192.168.126.11" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919056 4835 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919071 4835 flags.go:64] FLAG: --node-status-max-images="50" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919076 4835 flags.go:64] FLAG: --node-status-update-frequency="10s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919080 4835 flags.go:64] FLAG: --oom-score-adj="-999" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919085 4835 flags.go:64] FLAG: --pod-cidr="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919089 4835 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919096 4835 flags.go:64] FLAG: --pod-manifest-path="" Feb 02 16:50:08 crc 
kubenswrapper[4835]: I0202 16:50:08.919100 4835 flags.go:64] FLAG: --pod-max-pids="-1" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919104 4835 flags.go:64] FLAG: --pods-per-core="0" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919108 4835 flags.go:64] FLAG: --port="10250" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919112 4835 flags.go:64] FLAG: --protect-kernel-defaults="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919116 4835 flags.go:64] FLAG: --provider-id="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919120 4835 flags.go:64] FLAG: --qos-reserved="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919124 4835 flags.go:64] FLAG: --read-only-port="10255" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919128 4835 flags.go:64] FLAG: --register-node="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919132 4835 flags.go:64] FLAG: --register-schedulable="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919136 4835 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919143 4835 flags.go:64] FLAG: --registry-burst="10" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919146 4835 flags.go:64] FLAG: --registry-qps="5" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919150 4835 flags.go:64] FLAG: --reserved-cpus="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919154 4835 flags.go:64] FLAG: --reserved-memory="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919160 4835 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919164 4835 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919168 4835 flags.go:64] FLAG: --rotate-certificates="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919172 4835 flags.go:64] FLAG: --rotate-server-certificates="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919176 4835 flags.go:64] FLAG: --runonce="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919180 4835 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919184 4835 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919190 4835 flags.go:64] FLAG: --seccomp-default="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919194 4835 flags.go:64] FLAG: --serialize-image-pulls="true" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919197 4835 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919202 4835 flags.go:64] FLAG: --storage-driver-db="cadvisor" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919206 4835 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919210 4835 flags.go:64] FLAG: --storage-driver-password="root" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919213 4835 flags.go:64] FLAG: --storage-driver-secure="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919217 4835 flags.go:64] FLAG: --storage-driver-table="stats" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919222 4835 flags.go:64] FLAG: --storage-driver-user="root" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919231 4835 flags.go:64] FLAG: 
--streaming-connection-idle-timeout="4h0m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919236 4835 flags.go:64] FLAG: --sync-frequency="1m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919240 4835 flags.go:64] FLAG: --system-cgroups="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919243 4835 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919250 4835 flags.go:64] FLAG: --system-reserved-cgroup="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919254 4835 flags.go:64] FLAG: --tls-cert-file="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919258 4835 flags.go:64] FLAG: --tls-cipher-suites="[]" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919272 4835 flags.go:64] FLAG: --tls-min-version="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919287 4835 flags.go:64] FLAG: --tls-private-key-file="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919291 4835 flags.go:64] FLAG: --topology-manager-policy="none" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919295 4835 flags.go:64] FLAG: --topology-manager-policy-options="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919299 4835 flags.go:64] FLAG: --topology-manager-scope="container" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919303 4835 flags.go:64] FLAG: --v="2" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919309 4835 flags.go:64] FLAG: --version="false" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919314 4835 flags.go:64] FLAG: --vmodule="" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919319 4835 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.919323 4835 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922720 4835 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922738 4835 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922743 4835 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922750 4835 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922755 4835 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922759 4835 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922764 4835 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922769 4835 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922773 4835 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922778 4835 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922784 4835 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922792 4835 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922797 4835 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922802 4835 feature_gate.go:330] unrecognized feature gate: Example Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922806 4835 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922811 4835 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922815 4835 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922819 4835 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922823 4835 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922828 4835 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922832 4835 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922836 4835 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922841 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922845 4835 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922850 4835 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922855 4835 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922860 4835 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922865 4835 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922870 4835 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922874 4835 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922884 4835 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922888 4835 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922891 4835 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922895 4835 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922899 4835 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922904 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922908 4835 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922911 4835 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922915 4835 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922918 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922922 4835 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922926 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922930 4835 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922933 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922937 4835 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922942 4835 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922947 4835 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922952 4835 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922955 4835 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922959 4835 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922963 4835 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922968 4835 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922971 4835 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922975 4835 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922979 4835 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922982 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922986 4835 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922989 4835 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922993 4835 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.922996 4835 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923001 4835 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923005 4835 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923009 4835 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923012 4835 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923017 4835 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923020 4835 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923024 4835 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923027 4835 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923031 4835 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923034 4835 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.923037 4835 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.923044 4835 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true 
DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.936688 4835 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.936763 4835 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936901 4835 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936919 4835 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936930 4835 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936940 4835 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936949 4835 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936960 4835 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936972 4835 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936985 4835 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.936996 4835 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937005 4835 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937014 4835 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937025 4835 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937036 4835 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937045 4835 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937054 4835 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937062 4835 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937070 4835 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937078 4835 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937086 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937095 4835 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937103 4835 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937111 4835 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937119 4835 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937128 4835 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937136 4835 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937144 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937152 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937160 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937168 4835 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937177 4835 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937186 4835 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937194 4835 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937202 4835 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937211 4835 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937220 4835 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937228 4835 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937237 4835 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937249 4835 feature_gate.go:330] unrecognized feature gate: Example Feb 02 16:50:08 crc 
kubenswrapper[4835]: W0202 16:50:08.937259 4835 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937299 4835 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937309 4835 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937318 4835 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937328 4835 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937336 4835 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937346 4835 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937354 4835 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937363 4835 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937370 4835 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937378 4835 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937386 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937395 4835 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937406 4835 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937416 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937425 4835 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937433 4835 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937441 4835 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937450 4835 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937459 4835 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937468 4835 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937476 4835 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937484 4835 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937492 4835 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937500 4835 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937508 4835 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937516 4835 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937524 4835 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937531 4835 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937539 4835 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937547 4835 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937558 4835 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937567 4835 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.937581 4835 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937833 4835 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937848 4835 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937859 4835 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937868 4835 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937877 4835 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937885 4835 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937893 4835 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937904 4835 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937915 4835 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937925 4835 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937934 4835 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937943 4835 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937952 4835 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937961 4835 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937969 4835 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937977 4835 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937985 4835 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.937994 4835 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938003 4835 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938012 4835 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938021 4835 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938029 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938037 4835 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938045 4835 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938053 4835 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938061 4835 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938069 4835 feature_gate.go:330] unrecognized feature gate: 
BootcNodeManagement Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938077 4835 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938085 4835 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938094 4835 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938102 4835 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938110 4835 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938117 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938125 4835 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938134 4835 feature_gate.go:330] unrecognized feature gate: Example Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938142 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938149 4835 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938158 4835 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938166 4835 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938174 4835 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938183 4835 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938193 4835 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938232 4835 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938241 4835 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938250 4835 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938258 4835 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938268 4835 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938302 4835 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938312 4835 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938321 4835 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938330 4835 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938339 4835 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938346 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938355 4835 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938363 4835 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938372 4835 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938380 4835 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938388 4835 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938396 4835 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938403 4835 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938411 4835 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938420 4835 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938427 4835 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938435 4835 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938443 4835 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938451 4835 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938459 4835 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938467 4835 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938475 4835 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938482 4835 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 16:50:08 crc kubenswrapper[4835]: W0202 16:50:08.938489 4835 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.938502 4835 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true 
MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.940043 4835 server.go:940] "Client rotation is on, will bootstrap in background" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.946803 4835 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.946962 4835 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.948777 4835 server.go:997] "Starting client certificate rotation" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.948830 4835 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.949113 4835 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-18 06:16:08.653794587 +0000 UTC Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.949261 4835 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.981603 4835 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 16:50:08 crc kubenswrapper[4835]: I0202 16:50:08.985073 4835 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 16:50:08 crc kubenswrapper[4835]: E0202 16:50:08.988249 4835 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.005518 4835 log.go:25] "Validated CRI v1 runtime API" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.048758 4835 log.go:25] "Validated CRI v1 image API" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.051223 4835 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.058873 4835 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-02-02-16-45-47-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.058918 4835 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 
fsType:tmpfs blockSize:0}] Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.089739 4835 manager.go:217] Machine: {Timestamp:2026-02-02 16:50:09.086593922 +0000 UTC m=+0.708198082 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:a9bea5c4-1a52-47fb-a314-7115c7964a56 BootID:827d890b-2331-413c-aedb-9de5a54d9bc1 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:d8:2d:69 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:d8:2d:69 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:1a:1d:1d Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:3b:3a:ab Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:bb:0e:a1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:b3:2d:a6 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:71:d5:f9 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:3e:fd:dc:bf:cb:43 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:1e:ca:9a:3e:be:8b Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 
Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.090211 4835 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.090443 4835 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.090973 4835 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.091402 4835 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.091467 4835 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.091789 4835 topology_manager.go:138] "Creating topology manager with none policy" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.091811 4835 container_manager_linux.go:303] "Creating device plugin manager" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.092555 4835 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.092631 4835 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.092849 4835 state_mem.go:36] "Initialized new in-memory state store" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.093009 4835 server.go:1245] "Using root directory" path="/var/lib/kubelet" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.100007 4835 kubelet.go:418] "Attempting to sync node with API server" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.100052 4835 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.100101 4835 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.100124 4835 kubelet.go:324] "Adding apiserver pod source" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.100143 4835 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.106770 4835 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.109787 4835 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.110372 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.110462 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.110513 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.110575 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.111790 4835 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114132 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114182 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114197 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114210 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114232 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114245 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114259 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114311 4835 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/downward-api" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114328 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114342 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114361 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.114375 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.116333 4835 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.116878 4835 server.go:1280] "Started kubelet" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.117049 4835 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.117876 4835 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Feb 02 16:50:09 crc systemd[1]: Started Kubernetes Kubelet. Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.119173 4835 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.120844 4835 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.123208 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.123333 4835 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.123745 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 08:48:15.234428481 +0000 UTC Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.123851 4835 volume_manager.go:287] "The desired_state_of_world populator starts" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.126202 4835 volume_manager.go:289] "Starting Kubelet Volume Manager" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.126538 4835 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.123913 4835 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.127295 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.127701 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:09 crc 
kubenswrapper[4835]: E0202 16:50:09.129244 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="200ms" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.129709 4835 server.go:460] "Adding debug handlers to kubelet server" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.130000 4835 factory.go:55] Registering systemd factory Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.130030 4835 factory.go:221] Registration of the systemd container factory successfully Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.131288 4835 factory.go:153] Registering CRI-O factory Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.131386 4835 factory.go:221] Registration of the crio container factory successfully Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.131533 4835 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.131653 4835 factory.go:103] Registering Raw factory Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.131744 4835 manager.go:1196] Started watching for new ooms in manager Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.132463 4835 manager.go:319] Starting recovery of all containers Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.131048 4835 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.245:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18907c03c0bb0f36 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 16:50:09.116835638 +0000 UTC m=+0.738439728,LastTimestamp:2026-02-02 16:50:09.116835638 +0000 UTC m=+0.738439728,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.145391 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.145567 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.145650 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.145727 4835 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.145814 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.145886 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.145969 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.146047 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.146124 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.149641 4835 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.149794 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.149915 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150009 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150148 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Feb 02 16:50:09 crc 
kubenswrapper[4835]: I0202 16:50:09.150226 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150329 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150418 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150511 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150591 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150669 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150747 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150825 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.150905 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151003 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151113 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151244 4835 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151342 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151435 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151527 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151609 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151688 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151766 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151848 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.151931 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152012 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152099 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152175 4835 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152255 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152375 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152453 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152536 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152618 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152693 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152773 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152855 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.152931 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.153009 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.153087 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.153191 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.153986 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154048 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154081 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154112 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154156 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154193 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154303 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154346 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154378 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154405 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154432 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154458 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154488 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154518 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154552 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154580 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154612 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154638 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154667 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154694 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154721 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154748 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154780 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154809 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154836 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154863 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154890 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154920 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154947 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.154974 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155004 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155032 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155063 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155128 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155155 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155183 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155209 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155234 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155262 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155355 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155385 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155414 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155442 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155469 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155494 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155522 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155551 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155577 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155605 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155631 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155658 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155685 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155760 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155790 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155821 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155847 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155889 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155924 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155957 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.155987 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156018 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156047 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156082 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156111 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156149 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156180 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156205 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156232 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156259 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156447 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156479 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156504 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156530 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156558 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156584 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156608 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156633 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156659 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156687 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156713 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156740 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156764 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156818 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156843 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156872 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156897 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156923 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" 
volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156947 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156972 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.156997 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157024 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157051 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157077 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157104 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157131 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157159 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157188 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157216 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157244 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157270 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157331 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157357 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157386 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157410 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157436 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157461 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157485 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157513 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157539 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157563 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157594 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157621 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157647 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157672 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157697 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157722 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157747 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157775 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157802 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157826 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157849 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157876 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157901 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157924 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.157950 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159409 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159441 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159463 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159484 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159505 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159525 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159545 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159564 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159585 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159605 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159627 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159649 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159681 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159701 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159722 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159741 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159760 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159781 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159800 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159820 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159859 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159881 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159900 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159921 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159940 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.159960 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.160616 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.160651 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.160672 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.160691 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.160713 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.160735 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.161247 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.161333 4835 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.161353 4835 reconstruct.go:97] "Volume reconstruction finished" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.161369 4835 reconciler.go:26] "Reconciler: start to sync state" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.169265 4835 manager.go:324] Recovery completed Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.182299 4835 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.184439 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.186328 4835 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.186516 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.186556 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.186569 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.187543 4835 status_manager.go:217] "Starting to sync pod status with apiserver" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.187607 4835 kubelet.go:2335] "Starting kubelet main sync loop" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.187681 4835 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.188178 4835 cpu_manager.go:225] "Starting CPU manager" policy="none" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.188338 4835 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.188243 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.188436 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.188476 4835 state_mem.go:36] "Initialized new in-memory state store" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.204827 4835 policy_none.go:49] "None policy: Start" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.205766 4835 memory_manager.go:170] "Starting memorymanager" policy="None" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.205802 4835 state_mem.go:35] "Initializing new in-memory state store" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.227696 4835 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.265713 4835 manager.go:334] "Starting Device Plugin manager" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.265962 4835 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.266080 4835 server.go:79] "Starting device plugin registration server" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.266768 4835 eviction_manager.go:189] "Eviction manager: starting control loop" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.266953 4835 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.267289 4835 plugin_watcher.go:51] "Plugin Watcher Start" 
path="/var/lib/kubelet/plugins_registry" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.267520 4835 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.267664 4835 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.276001 4835 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.288141 4835 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.288420 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.289698 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.289823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.289955 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.290188 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.290383 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.290453 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.292207 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.292345 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.292468 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.292397 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.292624 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.292639 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.292861 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.293040 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.293136 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.296800 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.296830 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.296841 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.296894 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.296922 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.296934 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.297109 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.297358 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.297441 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298000 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298032 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298044 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298172 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298328 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298454 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298628 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298658 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.298670 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.299480 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.299510 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.299522 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.299952 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.299975 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.299986 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.300151 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.300185 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.301058 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.301125 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.301199 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.331011 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="400ms" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.363678 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.363709 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.363729 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.363746 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.363762 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.363780 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.363897 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.364039 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.364127 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.364182 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.364215 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.364245 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.364307 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.364340 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.364368 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.367460 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.369324 4835 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.369364 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.369376 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.369405 4835 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.369925 4835 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.245:6443: connect: connection refused" node="crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466105 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466356 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466374 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466295 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466457 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466474 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466487 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466524 
4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466522 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466577 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466627 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466630 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466692 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466660 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466658 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466714 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466775 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466810 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466733 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466896 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466963 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466967 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466986 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.466999 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.467012 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.467022 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.467087 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 
16:50:09.467158 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.467044 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.467347 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.570088 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.571593 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.571637 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.571649 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.571682 4835 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.572206 4835 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.245:6443: connect: connection refused" node="crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.623832 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.651624 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.669652 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.671239 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-5b1064a4adcdef5dd2c064934df59e6d8d6ddbca4c0ebd9448458f296b60a5c5 WatchSource:0}: Error finding container 5b1064a4adcdef5dd2c064934df59e6d8d6ddbca4c0ebd9448458f296b60a5c5: Status 404 returned error can't find the container with id 5b1064a4adcdef5dd2c064934df59e6d8d6ddbca4c0ebd9448458f296b60a5c5 Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.678504 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.684955 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.686141 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-78eb12a7bb92032c0896647fe847b0a59491ebd7528cf0fbe699fb6dc4699024 WatchSource:0}: Error finding container 78eb12a7bb92032c0896647fe847b0a59491ebd7528cf0fbe699fb6dc4699024: Status 404 returned error can't find the container with id 78eb12a7bb92032c0896647fe847b0a59491ebd7528cf0fbe699fb6dc4699024 Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.689728 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-9ae3f43fc47a0aac2fae3d273187fc277871f7e577840d0b00a1af769e2cc46a WatchSource:0}: Error finding container 9ae3f43fc47a0aac2fae3d273187fc277871f7e577840d0b00a1af769e2cc46a: Status 404 returned error can't find the container with id 9ae3f43fc47a0aac2fae3d273187fc277871f7e577840d0b00a1af769e2cc46a Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.696556 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-3f7eb1dffc0a3f76166ab6106f670db2aeeb912384a0075a83b7356f173244e9 WatchSource:0}: Error finding container 3f7eb1dffc0a3f76166ab6106f670db2aeeb912384a0075a83b7356f173244e9: Status 404 returned error can't find the container with id 3f7eb1dffc0a3f76166ab6106f670db2aeeb912384a0075a83b7356f173244e9 Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.714034 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-0c0983fc6c85674ea3fd8c8ca0b94756ceab56259095b3d585f16fa741c47492 WatchSource:0}: Error finding container 0c0983fc6c85674ea3fd8c8ca0b94756ceab56259095b3d585f16fa741c47492: Status 404 returned error can't find the container with id 0c0983fc6c85674ea3fd8c8ca0b94756ceab56259095b3d585f16fa741c47492 Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.732457 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="800ms" Feb 02 16:50:09 crc kubenswrapper[4835]: W0202 16:50:09.916989 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.917089 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.973260 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.974526 4835 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.974592 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.974618 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:09 crc kubenswrapper[4835]: I0202 16:50:09.974664 4835 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 16:50:09 crc kubenswrapper[4835]: E0202 16:50:09.975169 4835 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.245:6443: connect: connection refused" node="crc" Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.122884 4835 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.127121 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 09:45:18.065397886 +0000 UTC Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.191181 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3f7eb1dffc0a3f76166ab6106f670db2aeeb912384a0075a83b7356f173244e9"} Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.192289 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9ae3f43fc47a0aac2fae3d273187fc277871f7e577840d0b00a1af769e2cc46a"} Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.193944 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"78eb12a7bb92032c0896647fe847b0a59491ebd7528cf0fbe699fb6dc4699024"} Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.194733 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"5b1064a4adcdef5dd2c064934df59e6d8d6ddbca4c0ebd9448458f296b60a5c5"} Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.195735 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0c0983fc6c85674ea3fd8c8ca0b94756ceab56259095b3d585f16fa741c47492"} Feb 02 16:50:10 crc kubenswrapper[4835]: E0202 16:50:10.534083 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="1.6s" Feb 02 16:50:10 crc kubenswrapper[4835]: W0202 16:50:10.624249 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:10 crc kubenswrapper[4835]: E0202 16:50:10.624449 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:10 crc kubenswrapper[4835]: W0202 16:50:10.625763 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:10 crc kubenswrapper[4835]: E0202 16:50:10.625845 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:10 crc kubenswrapper[4835]: W0202 16:50:10.658455 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:10 crc kubenswrapper[4835]: E0202 16:50:10.658856 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.775359 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.777314 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.777353 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.777367 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:10 crc kubenswrapper[4835]: I0202 16:50:10.777391 4835 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 16:50:10 crc kubenswrapper[4835]: E0202 16:50:10.777807 4835 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.245:6443: connect: connection refused" node="crc" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.123531 4835 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.127264 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: 
Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 15:12:57.679376182 +0000 UTC Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.156546 4835 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 16:50:11 crc kubenswrapper[4835]: E0202 16:50:11.158138 4835 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.200893 4835 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="692dc63673981310457321880880b8f84b7e7935e0916e98f497e1e26efd2df5" exitCode=0 Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.200958 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"692dc63673981310457321880880b8f84b7e7935e0916e98f497e1e26efd2df5"} Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.200998 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.202450 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.202502 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.202521 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.204039 4835 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472" exitCode=0 Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.204150 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472"} Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.204229 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.205527 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.205559 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.205570 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.209915 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141"} Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.209948 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5"} Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.209959 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d"} Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.209967 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4"} Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.210111 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.211378 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.211412 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.211424 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.212806 4835 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac" exitCode=0 Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.212909 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac"} Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.212965 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.214233 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.214263 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.214293 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.215401 4835 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92" exitCode=0 Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.215458 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92"} Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.215505 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.216222 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.216253 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.216264 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.218137 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.222742 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.222797 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:11 crc kubenswrapper[4835]: I0202 16:50:11.222823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.123079 4835 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.127481 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 09:57:15.348162225 +0000 UTC Feb 02 16:50:12 crc kubenswrapper[4835]: E0202 16:50:12.135237 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="3.2s" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.226011 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.226062 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.226078 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.226090 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.228245 4835 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c" exitCode=0 Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.228314 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.228387 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.229365 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.229434 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.229451 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.230038 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.230028 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"1f070537f6e3b1cf3063217468831340e228c08e517afcce52effc5dddad84bf"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.230796 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.230813 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.230821 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.241102 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.241074 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.241185 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.241308 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.242307 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960"} Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.244565 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.244571 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.244626 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.244638 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.244648 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.244647 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:12 crc kubenswrapper[4835]: W0202 16:50:12.251432 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:12 crc kubenswrapper[4835]: E0202 16:50:12.251552 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:12 crc kubenswrapper[4835]: W0202 16:50:12.269933 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:12 crc kubenswrapper[4835]: E0202 16:50:12.270022 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:12 crc kubenswrapper[4835]: W0202 16:50:12.297680 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.245:6443: connect: connection refused Feb 02 16:50:12 crc kubenswrapper[4835]: E0202 16:50:12.297759 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.245:6443: connect: connection refused" logger="UnhandledError" Feb 02 16:50:12 crc kubenswrapper[4835]: E0202 16:50:12.371545 4835 event.go:368] "Unable to write event (may retry after sleeping)" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.245:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18907c03c0bb0f36 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 16:50:09.116835638 +0000 UTC m=+0.738439728,LastTimestamp:2026-02-02 16:50:09.116835638 +0000 UTC m=+0.738439728,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.377908 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.379013 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.379042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.379053 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:12 crc kubenswrapper[4835]: I0202 16:50:12.379076 4835 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 16:50:12 crc kubenswrapper[4835]: E0202 16:50:12.379461 4835 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.245:6443: connect: connection refused" node="crc" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.128164 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 03:05:39.511006639 +0000 UTC Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.246723 4835 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b" exitCode=0 Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.246824 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b"} Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.246913 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.248616 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.248670 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.248688 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.251946 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d"} Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.252045 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.252137 4835 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.252209 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.252142 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.253568 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.253601 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.253610 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.253564 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.253653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.253670 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.254296 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.254336 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:13 crc kubenswrapper[4835]: I0202 16:50:13.254346 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.128425 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 04:19:05.997483122 +0000 UTC Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.260079 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.260121 4835 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.260165 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.260089 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c"} Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.260814 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93"} Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.260933 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967"} Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.261021 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899"} Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.261103 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019"} Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.261475 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.261637 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.261728 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.261755 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.261915 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.261935 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.884989 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.885220 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.886791 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.886829 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:14 crc kubenswrapper[4835]: I0202 16:50:14.886838 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.129024 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 03:37:25.906094123 +0000 UTC Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.263369 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.265235 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:15 crc 
kubenswrapper[4835]: I0202 16:50:15.265322 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.265342 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.304317 4835 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.580496 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.581963 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.582023 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.582039 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:15 crc kubenswrapper[4835]: I0202 16:50:15.582071 4835 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 16:50:16 crc kubenswrapper[4835]: I0202 16:50:16.129401 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 19:55:55.770822281 +0000 UTC Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.129656 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 09:30:07.168580501 +0000 UTC Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.138148 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.138345 4835 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.138399 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.140351 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.140443 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.140487 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.222390 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.222615 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.224394 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.224474 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:17 crc 
kubenswrapper[4835]: I0202 16:50:17.224503 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.363562 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.363835 4835 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.363896 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.365976 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.366017 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.366031 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.469728 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.506654 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.506973 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.510019 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.510090 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.510116 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:17 crc kubenswrapper[4835]: I0202 16:50:17.514217 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.130373 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.130431 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 16:08:19.903199697 +0000 UTC Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.269850 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.269919 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.270019 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.271098 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.271134 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.271145 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.271478 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.271566 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.271589 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.323388 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.323611 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.325106 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.325161 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:18 crc kubenswrapper[4835]: I0202 16:50:18.325183 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.130938 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 05:59:52.358091699 +0000 UTC Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.197572 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.272393 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.272707 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.273775 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.273835 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.273853 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.274851 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.274914 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:19 crc kubenswrapper[4835]: I0202 16:50:19.274937 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:19 crc kubenswrapper[4835]: 
E0202 16:50:19.276406 4835 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 16:50:20 crc kubenswrapper[4835]: I0202 16:50:20.131399 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 05:44:01.947423154 +0000 UTC Feb 02 16:50:21 crc kubenswrapper[4835]: I0202 16:50:21.131413 4835 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 02 16:50:21 crc kubenswrapper[4835]: I0202 16:50:21.131535 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 16:50:21 crc kubenswrapper[4835]: I0202 16:50:21.131713 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 15:40:00.08245254 +0000 UTC Feb 02 16:50:22 crc kubenswrapper[4835]: I0202 16:50:22.131964 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 20:39:15.162293966 +0000 UTC Feb 02 16:50:22 crc kubenswrapper[4835]: I0202 16:50:22.925454 4835 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:57018->192.168.126.11:17697: read: connection reset by peer" start-of-body= Feb 02 16:50:22 crc kubenswrapper[4835]: I0202 16:50:22.925743 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:57018->192.168.126.11:17697: read: connection reset by peer" Feb 02 16:50:23 crc kubenswrapper[4835]: W0202 16:50:23.075165 4835 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.075316 4835 trace.go:236] Trace[194818503]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 16:50:13.073) (total time: 10002ms): Feb 02 16:50:23 crc kubenswrapper[4835]: Trace[194818503]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (16:50:23.075) Feb 02 16:50:23 crc kubenswrapper[4835]: Trace[194818503]: [10.002172946s] [10.002172946s] END Feb 02 16:50:23 crc kubenswrapper[4835]: E0202 16:50:23.075349 4835 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to 
list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.124638 4835 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.132945 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 21:34:20.953011276 +0000 UTC Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.212103 4835 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.212533 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.217985 4835 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.218213 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.285017 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.286682 4835 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d" exitCode=255 Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.286721 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d"} Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.286834 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.287594 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.287620 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.287629 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:23 crc kubenswrapper[4835]: I0202 16:50:23.288089 4835 scope.go:117] "RemoveContainer" containerID="7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d" Feb 02 16:50:24 crc kubenswrapper[4835]: I0202 16:50:24.133553 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 09:15:59.330364782 +0000 UTC Feb 02 16:50:24 crc kubenswrapper[4835]: I0202 16:50:24.292076 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 16:50:24 crc kubenswrapper[4835]: I0202 16:50:24.294420 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9"} Feb 02 16:50:24 crc kubenswrapper[4835]: I0202 16:50:24.294631 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:24 crc kubenswrapper[4835]: I0202 16:50:24.295836 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:24 crc kubenswrapper[4835]: I0202 16:50:24.295881 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:24 crc kubenswrapper[4835]: I0202 16:50:24.295895 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:25 crc kubenswrapper[4835]: I0202 16:50:25.134371 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 07:11:53.1337141 +0000 UTC Feb 02 16:50:26 crc kubenswrapper[4835]: I0202 16:50:26.134670 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 11:21:08.839758446 +0000 UTC Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.135619 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 22:23:53.559482802 +0000 UTC Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.144210 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.144506 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.144588 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.146378 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.146424 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.146436 4835 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.151048 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.303842 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.305510 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.305577 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:27 crc kubenswrapper[4835]: I0202 16:50:27.305600 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.136720 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 08:12:23.547688363 +0000 UTC Feb 02 16:50:28 crc kubenswrapper[4835]: E0202 16:50:28.206868 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Feb 02 16:50:28 crc kubenswrapper[4835]: E0202 16:50:28.208731 4835 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.210969 4835 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.210990 4835 trace.go:236] Trace[454159453]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 16:50:17.210) (total time: 11000ms): Feb 02 16:50:28 crc kubenswrapper[4835]: Trace[454159453]: ---"Objects listed" error: 11000ms (16:50:28.210) Feb 02 16:50:28 crc kubenswrapper[4835]: Trace[454159453]: [11.000728493s] [11.000728493s] END Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.211075 4835 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.211164 4835 trace.go:236] Trace[263075263]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 16:50:17.057) (total time: 11153ms): Feb 02 16:50:28 crc kubenswrapper[4835]: Trace[263075263]: ---"Objects listed" error: 11153ms (16:50:28.211) Feb 02 16:50:28 crc kubenswrapper[4835]: Trace[263075263]: [11.153782674s] [11.153782674s] END Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.211181 4835 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.211163 4835 trace.go:236] Trace[199774968]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 16:50:16.934) (total time: 11277ms): Feb 02 16:50:28 crc kubenswrapper[4835]: Trace[199774968]: ---"Objects listed" error: 11276ms (16:50:28.211) Feb 02 16:50:28 crc kubenswrapper[4835]: Trace[199774968]: [11.277031008s] [11.277031008s] END Feb 02 16:50:28 crc kubenswrapper[4835]: 
I0202 16:50:28.211227 4835 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.215148 4835 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.248804 4835 csr.go:261] certificate signing request csr-b6lr2 is approved, waiting to be issued Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.260214 4835 csr.go:257] certificate signing request csr-b6lr2 is issued Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.269688 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.274383 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.274453 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:28 crc kubenswrapper[4835]: I0202 16:50:28.949217 4835 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Feb 02 16:50:28 crc kubenswrapper[4835]: W0202 16:50:28.949483 4835 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.CSIDriver ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Feb 02 16:50:28 crc kubenswrapper[4835]: W0202 16:50:28.949480 4835 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Feb 02 16:50:28 crc kubenswrapper[4835]: W0202 16:50:28.949494 4835 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Node ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Feb 02 16:50:28 crc kubenswrapper[4835]: E0202 16:50:28.949443 4835 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/events\": read tcp 38.102.83.245:53826->38.102.83.245:6443: use of closed network connection" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.18907c03e247a553 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 16:50:09.679697235 +0000 UTC m=+1.301301335,LastTimestamp:2026-02-02 16:50:09.679697235 +0000 UTC m=+1.301301335,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 16:50:29 crc 
kubenswrapper[4835]: I0202 16:50:29.111888 4835 apiserver.go:52] "Watching apiserver" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.118335 4835 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.118756 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.119131 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.119143 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.119313 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.119765 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.119858 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.119875 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.120016 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.120115 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.120296 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.121077 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.121828 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.122063 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.122090 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.122175 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.122306 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.122351 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.122423 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.123236 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.127528 4835 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.136800 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 01:12:54.394656272 +0000 UTC Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.141694 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.169365 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.183278 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.193691 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.206192 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218448 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218509 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218537 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218568 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218595 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218613 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218628 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218644 4835 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218665 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218681 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218695 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218714 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218758 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218774 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218802 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218800 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218817 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218845 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218877 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218895 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218971 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218996 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219009 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219016 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219065 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219095 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219115 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219135 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219152 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219169 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219214 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219212 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219230 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219247 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219266 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219283 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219311 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219327 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219342 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219357 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219376 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219390 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: 
\"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219406 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219413 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219423 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219438 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219452 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219468 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219485 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219519 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219534 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219532 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219551 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219566 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219582 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219599 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219613 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219629 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219644 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219659 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219675 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219711 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219726 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219729 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219742 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219769 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219811 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219829 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219843 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219854 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219878 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219896 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219912 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219928 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219945 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219964 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219980 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219994 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220008 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220022 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220038 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220054 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220072 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220086 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220101 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220115 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220131 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220165 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220181 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220197 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220214 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220230 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220246 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220262 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220282 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220312 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220329 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220346 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220364 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220379 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220399 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220417 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220436 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220452 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220468 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220486 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220501 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220519 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220536 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220553 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220570 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220585 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220601 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220617 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220633 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220649 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220665 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220682 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220699 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220715 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220730 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220745 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220761 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220776 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220794 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220835 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220855 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220873 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 
16:50:29.220889 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220905 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220921 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220937 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220959 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220983 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221009 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221033 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221056 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221079 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 
16:50:29.221100 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221116 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221132 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221148 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221165 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221183 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221200 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221218 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221234 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221250 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: 
\"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221267 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221286 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221345 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221362 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221378 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221394 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221410 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221425 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221445 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221464 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221483 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221502 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221530 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221548 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221563 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221579 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221594 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221611 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221629 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221646 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: 
\"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221663 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221679 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221696 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221713 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221732 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221750 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221767 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221783 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221801 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221818 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.218873 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221900 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221922 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221940 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221956 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221974 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221992 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222009 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222027 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222044 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222061 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222079 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222103 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 16:50:29 crc 
kubenswrapper[4835]: I0202 16:50:29.222125 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222148 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222170 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222186 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222203 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222220 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222238 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222257 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222281 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222313 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222330 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222348 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222385 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222408 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222430 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222452 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222503 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222523 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222542 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222558 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222578 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222601 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222617 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222635 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222656 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222676 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222722 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222736 4835 reconciler_common.go:293] "Volume 
detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222747 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222757 4835 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222768 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222779 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222788 4835 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.219851 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220018 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220194 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220210 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220220 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220315 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220397 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220438 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220504 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.223090 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220657 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220778 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220797 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220841 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220998 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221123 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221523 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221689 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.221846 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222420 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222655 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222694 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222725 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.222894 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.220607 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.224848 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.224984 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.225199 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.225214 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.225231 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.225469 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.225533 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.225900 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.225921 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.225976 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.226234 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.226275 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.226312 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.226628 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.226786 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.226874 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227097 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227102 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227278 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227383 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227412 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227437 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227646 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227680 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227756 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227921 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.227998 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228022 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228020 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228242 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228249 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228325 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). 
InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228399 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228517 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228615 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228614 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228684 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228691 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.228940 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.229129 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.229250 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.229502 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.229516 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.229672 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.231466 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.231595 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.231693 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.231863 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.231911 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.232427 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.232485 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.232944 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.233281 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.233622 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.233821 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.234009 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.234211 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.236866 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.237794 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.237838 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.238095 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.238116 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.238411 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.239042 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.239063 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.239109 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.239262 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.239928 4835 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.242784 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.242940 4835 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.242998 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:29.742979503 +0000 UTC m=+21.364583673 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.244790 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.244867 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.245131 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.245296 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.245861 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.246094 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.247032 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.247336 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.247735 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.248199 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.261592 4835 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-02 16:45:28 +0000 UTC, rotation deadline is 2026-11-27 00:00:21.792816651 +0000 UTC Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.261662 4835 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7135h9m52.531158929s for next certificate rotation Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.261650 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.261932 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.261971 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.262124 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.262346 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.262408 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.262434 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.262684 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.262715 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.262871 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.263364 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.263742 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.264004 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265420 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.264485 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265337 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265519 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265677 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.265747 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.265856 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265740 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265874 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.265874 4835 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.265921 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.265997 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:29.765973962 +0000 UTC m=+21.387578042 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.266136 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.266163 4835 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.266224 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:29.766200328 +0000 UTC m=+21.387804408 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.266434 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.266550 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.266628 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.266657 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.266681 4835 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.264496 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.261709 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.264984 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265009 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.264754 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265096 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.265248 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.266727 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:50:29.766716143 +0000 UTC m=+21.388320223 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.267146 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.267759 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:29.767744771 +0000 UTC m=+21.389348851 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.267918 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.268245 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.268590 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.270340 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.268645 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.264641 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.269118 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.269384 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.269387 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.269648 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.269728 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.269887 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.270353 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.270511 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.270572 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.270625 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.270660 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.270886 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.271573 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.270947 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.271104 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.271165 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.271318 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.271484 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.271495 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.271529 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.271904 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.272265 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.272408 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.272662 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.272829 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.273074 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.273193 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.273240 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.273331 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.273388 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.273506 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.273656 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.273660 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.274071 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.274397 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.274674 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.274727 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.274844 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.274914 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.275599 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.277128 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.278416 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.280639 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.281578 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.285264 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.285451 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.285609 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.285951 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.286051 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.286204 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.286325 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.286219 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.286537 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.289637 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.290094 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.296435 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.297932 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.299264 4835 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.302139 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.307182 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.308788 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.313934 4835 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.317542 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.322654 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323141 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323191 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323304 4835 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323324 4835 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323327 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323338 4835 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323376 4835 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323388 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323402 4835 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323413 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323426 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323438 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323450 4835 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323391 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.323461 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324013 4835 reconciler_common.go:293] "Volume detached for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324045 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324059 4835 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324073 4835 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324089 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324104 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324116 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324129 4835 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324141 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324153 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324168 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324181 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324193 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324203 4835 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324265 4835 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324296 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324310 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324323 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324338 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324351 4835 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324363 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324375 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324386 4835 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324398 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324446 4835 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324458 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324470 4835 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324482 4835 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324495 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324508 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.324522 4835 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325119 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325137 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325151 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325164 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325177 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325191 4835 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325204 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325216 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325228 4835 
reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325240 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325255 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325267 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325318 4835 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325332 4835 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325344 4835 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325356 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325368 4835 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325382 4835 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325393 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325405 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325418 4835 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325429 4835 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325441 4835 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325452 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325464 4835 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325476 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325489 4835 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325527 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325539 4835 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325551 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325563 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325575 4835 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325586 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325598 4835 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325608 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325636 4835 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325648 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325660 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325671 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325682 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325693 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325721 4835 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325733 4835 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325744 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325755 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325766 4835 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325777 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325788 4835 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325800 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325811 4835 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325824 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325841 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325854 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325865 4835 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325877 4835 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325890 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325903 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325917 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325931 4835 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325943 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325955 4835 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325966 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325978 4835 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.325991 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326003 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326015 4835 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326027 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326038 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326068 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326078 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326088 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326098 4835 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326109 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326121 4835 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326132 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326142 4835 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326153 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326165 4835 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326177 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326190 4835 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326202 4835 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326212 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326223 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326234 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326244 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326257 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326273 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: 
\"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326301 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326314 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326325 4835 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326336 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326348 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326360 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326371 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326385 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326397 4835 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326409 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326437 4835 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326449 4835 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326460 4835 reconciler_common.go:293] "Volume detached for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326471 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326482 4835 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326495 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326506 4835 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326518 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326530 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326541 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326553 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326565 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326578 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326590 4835 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326602 4835 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326614 4835 reconciler_common.go:293] "Volume detached for volume 
\"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326624 4835 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326639 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326650 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326667 4835 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326678 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326692 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326705 4835 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326718 4835 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326728 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326738 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326749 4835 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326759 4835 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326769 4835 reconciler_common.go:293] "Volume detached for volume 
\"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326779 4835 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326789 4835 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326802 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326814 4835 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326825 4835 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326837 4835 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326849 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326860 4835 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326871 4835 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326882 4835 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326894 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326915 4835 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: 
I0202 16:50:29.326929 4835 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326940 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326953 4835 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326963 4835 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326974 4835 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326986 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.326996 4835 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.337283 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.345860 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.358903 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.379672 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026
-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f
3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.394499 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.406037 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.417363 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cer
t-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.431573 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.431608 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resource
s\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.437367 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.444012 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.446725 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.475133 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.561337 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.599636 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.613628 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.645394 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026
-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f
3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.658071 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.667714 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.763507 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.763591 4835 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.763641 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:30.763627824 +0000 UTC m=+22.385231904 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.864224 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.864322 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.864355 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.864384 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864485 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864470 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:50:30.864436844 +0000 UTC m=+22.486040954 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864509 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864533 4835 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864545 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864566 4835 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864586 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:30.864568808 +0000 UTC m=+22.486172958 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864499 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864628 4835 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864636 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:30.864605389 +0000 UTC m=+22.486209559 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:29 crc kubenswrapper[4835]: E0202 16:50:29.864679 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:30.864664571 +0000 UTC m=+22.486268741 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.918038 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-jq8mv"] Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.918475 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-jq8mv" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.920404 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.920440 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.920869 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.930918 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.941282 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.950423 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.958910 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.965036 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/56088f67-2758-4fe2-b21a-fbf6f780c704-hosts-file\") pod \"node-resolver-jq8mv\" (UID: \"56088f67-2758-4fe2-b21a-fbf6f780c704\") " pod="openshift-dns/node-resolver-jq8mv" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.965139 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w72x6\" (UniqueName: \"kubernetes.io/projected/56088f67-2758-4fe2-b21a-fbf6f780c704-kube-api-access-w72x6\") pod \"node-resolver-jq8mv\" (UID: \"56088f67-2758-4fe2-b21a-fbf6f780c704\") " pod="openshift-dns/node-resolver-jq8mv" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.970003 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.985624 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026
-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f
3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:29 crc kubenswrapper[4835]: I0202 16:50:29.995142 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.005136 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.019514 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.029915 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.066351 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/56088f67-2758-4fe2-b21a-fbf6f780c704-hosts-file\") pod \"node-resolver-jq8mv\" (UID: \"56088f67-2758-4fe2-b21a-fbf6f780c704\") " pod="openshift-dns/node-resolver-jq8mv" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.066412 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w72x6\" (UniqueName: \"kubernetes.io/projected/56088f67-2758-4fe2-b21a-fbf6f780c704-kube-api-access-w72x6\") pod \"node-resolver-jq8mv\" (UID: \"56088f67-2758-4fe2-b21a-fbf6f780c704\") " pod="openshift-dns/node-resolver-jq8mv" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.066528 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/56088f67-2758-4fe2-b21a-fbf6f780c704-hosts-file\") pod \"node-resolver-jq8mv\" (UID: \"56088f67-2758-4fe2-b21a-fbf6f780c704\") " pod="openshift-dns/node-resolver-jq8mv" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.088543 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w72x6\" (UniqueName: \"kubernetes.io/projected/56088f67-2758-4fe2-b21a-fbf6f780c704-kube-api-access-w72x6\") pod \"node-resolver-jq8mv\" (UID: \"56088f67-2758-4fe2-b21a-fbf6f780c704\") " pod="openshift-dns/node-resolver-jq8mv" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.137575 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 12:48:41.004927264 +0000 UTC Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.188607 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.188734 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.188620 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.188807 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.230733 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-jq8mv" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.294862 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-q5dl9"] Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.295506 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-hzst6"] Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.295660 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.295669 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.296063 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-88n4w"] Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.296788 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-94jlf"] Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.297024 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.297028 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.297151 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.297253 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.297393 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.297912 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.298049 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300205 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300231 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300501 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300523 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300569 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300636 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300697 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300728 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300797 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300802 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.300973 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.301080 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.301306 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.304155 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.311997 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.315127 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143"} Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.315162 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"64374ccc2a4df4e0a9fdc64158f8b941e5d1a4c9260b6204a3bef8a117799497"} Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.318670 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396"} Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.318706 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2"} Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.318717 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"22f19579515b63c47ff193e635f8390a35e97f0d23888dbbceb3a7f79204f269"} Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.319650 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jq8mv" event={"ID":"56088f67-2758-4fe2-b21a-fbf6f780c704","Type":"ContainerStarted","Data":"39a8125040ccb076d4747993cf593e9540bb7a31d623d0be327af2ec3d33e6db"} Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.323594 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d9eff6f25a1eb0aa8f55365362fe595814dbe398f10b7855e82cc8a1b197c38f"} Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.325702 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.336910 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.350003 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.360347 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369289 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-netns\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369331 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5bw5\" (UniqueName: \"kubernetes.io/projected/92da4528-a699-45b1-aed0-d49a382bf0a1-kube-api-access-z5bw5\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369351 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-kubelet\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 
16:50:30.369366 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-socket-dir-parent\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369386 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-conf-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369405 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-etc-kubernetes\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369442 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-kubelet\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369457 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-daemon-config\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369471 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-system-cni-dir\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369485 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-etc-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369499 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtvrr\" (UniqueName: \"kubernetes.io/projected/0cbaf0a8-c75d-4059-9874-d0a193090578-kube-api-access-xtvrr\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369520 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-script-lib\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369537 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-cni-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369573 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-bin\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369588 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-env-overrides\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369602 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-ovn\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369616 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cnibin\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369632 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-system-cni-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369648 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-cni-bin\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369667 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-systemd\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369681 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-node-log\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369696 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-netd\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369709 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/92da4528-a699-45b1-aed0-d49a382bf0a1-cni-binary-copy\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369739 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-var-lib-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369754 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-proxy-tls\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369770 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-ovn-kubernetes\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369785 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369800 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-systemd-units\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369821 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-rootfs\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369838 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvmfv\" (UniqueName: 
\"kubernetes.io/projected/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-kube-api-access-gvmfv\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369852 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-multus-certs\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369870 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369894 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369915 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-config\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369939 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-os-release\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369953 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-cni-multus\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.369972 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-hostroot\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370000 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-slash\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc 
kubenswrapper[4835]: I0202 16:50:30.370017 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cni-binary-copy\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370034 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpfxb\" (UniqueName: \"kubernetes.io/projected/6345da6a-11cd-4a06-8586-5ea1c5a70bca-kube-api-access-mpfxb\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370062 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-cnibin\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370082 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-k8s-cni-cncf-io\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370103 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-netns\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370126 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-os-release\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370145 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-mcd-auth-proxy-config\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370165 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370184 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-log-socket\") pod \"ovnkube-node-88n4w\" (UID: 
\"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.370205 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0cbaf0a8-c75d-4059-9874-d0a193090578-ovn-node-metrics-cert\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.379969 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9
0092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.391138 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.404263 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.415442 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.427863 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.440120 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.452367 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.466223 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470476 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-system-cni-dir\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470512 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-kubelet\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470535 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-daemon-config\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470555 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-etc-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470575 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtvrr\" (UniqueName: \"kubernetes.io/projected/0cbaf0a8-c75d-4059-9874-d0a193090578-kube-api-access-xtvrr\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470594 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-script-lib\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470614 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-cni-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470632 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-bin\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470651 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-env-overrides\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470677 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-systemd\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470695 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-ovn\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470715 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cnibin\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470737 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-system-cni-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470756 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-cni-bin\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470774 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-node-log\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470805 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-netd\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470827 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/92da4528-a699-45b1-aed0-d49a382bf0a1-cni-binary-copy\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470845 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-var-lib-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470865 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-proxy-tls\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470885 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-ovn-kubernetes\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470905 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-systemd-units\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470924 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470944 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470965 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.470989 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-rootfs\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471009 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvmfv\" (UniqueName: \"kubernetes.io/projected/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-kube-api-access-gvmfv\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471028 4835 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-multus-certs\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471057 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-slash\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471114 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-config\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471138 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-os-release\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471157 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-cni-multus\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471152 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-system-cni-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471182 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-hostroot\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471238 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-netns\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471315 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cni-binary-copy\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471335 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-var-lib-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471341 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpfxb\" (UniqueName: \"kubernetes.io/projected/6345da6a-11cd-4a06-8586-5ea1c5a70bca-kube-api-access-mpfxb\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471374 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-cnibin\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471409 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-k8s-cni-cncf-io\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471451 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-mcd-auth-proxy-config\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471473 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-os-release\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471494 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471515 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-log-socket\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471534 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0cbaf0a8-c75d-4059-9874-d0a193090578-ovn-node-metrics-cert\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471555 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-kubelet\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471576 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-netns\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471615 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5bw5\" (UniqueName: \"kubernetes.io/projected/92da4528-a699-45b1-aed0-d49a382bf0a1-kube-api-access-z5bw5\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471650 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-socket-dir-parent\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471672 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-conf-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471691 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-etc-kubernetes\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471762 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-etc-kubernetes\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472039 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-etc-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472035 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-systemd\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472121 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-multus-certs\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " 
pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472133 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-script-lib\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472155 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472176 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/92da4528-a699-45b1-aed0-d49a382bf0a1-cni-binary-copy\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472204 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-netns\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472195 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-node-log\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472238 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472223 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-ovn\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472260 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-netd\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472316 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-kubelet\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472256 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-bin\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472177 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cnibin\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472354 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-ovn-kubernetes\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472308 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-system-cni-dir\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472368 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-cni-bin\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472255 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-daemon-config\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472227 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-cni-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472413 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-log-socket\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472426 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-systemd-units\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472409 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-slash\") pod \"ovnkube-node-88n4w\" (UID: 
\"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472434 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-rootfs\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472327 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-openvswitch\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.471244 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-hostroot\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472544 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-socket-dir-parent\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472612 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-netns\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472629 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-run-k8s-cni-cncf-io\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472660 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-kubelet\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472675 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-cnibin\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472720 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-multus-conf-dir\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472928 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" 
(UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-env-overrides\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472955 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.472991 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-host-var-lib-cni-multus\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.473038 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6345da6a-11cd-4a06-8586-5ea1c5a70bca-os-release\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.473063 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-config\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.473088 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-mcd-auth-proxy-config\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.473120 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/92da4528-a699-45b1-aed0-d49a382bf0a1-os-release\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.473274 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6345da6a-11cd-4a06-8586-5ea1c5a70bca-cni-binary-copy\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.481754 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.492620 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.504830 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.518448 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.529460 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.544381 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.551945 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvmfv\" (UniqueName: \"kubernetes.io/projected/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-kube-api-access-gvmfv\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc 
kubenswrapper[4835]: I0202 16:50:30.552341 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d878a5fb-e7f6-4458-8bcc-119bf67ad45a-proxy-tls\") pod \"machine-config-daemon-94jlf\" (UID: \"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\") " pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.552470 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0cbaf0a8-c75d-4059-9874-d0a193090578-ovn-node-metrics-cert\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.552572 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtvrr\" (UniqueName: \"kubernetes.io/projected/0cbaf0a8-c75d-4059-9874-d0a193090578-kube-api-access-xtvrr\") pod \"ovnkube-node-88n4w\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.552739 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpfxb\" (UniqueName: \"kubernetes.io/projected/6345da6a-11cd-4a06-8586-5ea1c5a70bca-kube-api-access-mpfxb\") pod \"multus-additional-cni-plugins-q5dl9\" (UID: \"6345da6a-11cd-4a06-8586-5ea1c5a70bca\") " pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.553112 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5bw5\" (UniqueName: \"kubernetes.io/projected/92da4528-a699-45b1-aed0-d49a382bf0a1-kube-api-access-z5bw5\") pod \"multus-hzst6\" (UID: \"92da4528-a699-45b1-aed0-d49a382bf0a1\") " pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.566690 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.581699 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.599453 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.613017 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.614831 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.622181 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-hzst6" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.626864 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.1
1\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.629396 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:50:30 crc kubenswrapper[4835]: W0202 16:50:30.633244 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92da4528_a699_45b1_aed0_d49a382bf0a1.slice/crio-b88bcec09a890818d392f74beceef8a12a5b1ba9559dd3bf954f627d7a55ba3d WatchSource:0}: Error finding container b88bcec09a890818d392f74beceef8a12a5b1ba9559dd3bf954f627d7a55ba3d: Status 404 returned error can't find the container with id b88bcec09a890818d392f74beceef8a12a5b1ba9559dd3bf954f627d7a55ba3d Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.639408 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.646022 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:30Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:30 crc kubenswrapper[4835]: W0202 16:50:30.648481 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd878a5fb_e7f6_4458_8bcc_119bf67ad45a.slice/crio-b6292a57d3db2226ee3ec001cadf39c3772bd60c935bf85ce3cf1c78760550e4 WatchSource:0}: Error finding container 
b6292a57d3db2226ee3ec001cadf39c3772bd60c935bf85ce3cf1c78760550e4: Status 404 returned error can't find the container with id b6292a57d3db2226ee3ec001cadf39c3772bd60c935bf85ce3cf1c78760550e4 Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.774116 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.774317 4835 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.774390 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:32.77437632 +0000 UTC m=+24.395980400 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.874979 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875115 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:50:32.875092897 +0000 UTC m=+24.496696977 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.875341 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.875376 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:30 crc kubenswrapper[4835]: I0202 16:50:30.875434 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875510 4835 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875788 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:32.875774716 +0000 UTC m=+24.497378796 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875603 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875825 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875839 4835 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875869 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:32.875860589 +0000 UTC m=+24.497464669 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875643 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875909 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875923 4835 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:30 crc kubenswrapper[4835]: E0202 16:50:30.875986 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:32.875968832 +0000 UTC m=+24.497572912 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.138414 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 08:48:29.970862743 +0000 UTC Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.188990 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:31 crc kubenswrapper[4835]: E0202 16:50:31.189217 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.192570 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.193365 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.194325 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.194947 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.195540 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.196119 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.196770 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.197507 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.198315 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.198977 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.199622 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.200538 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.201131 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.204503 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.205092 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.205646 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.206390 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.206900 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.207624 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.208394 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.208968 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.211431 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.211972 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" 
path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.213418 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.214222 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.215719 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.216594 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.217198 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.218464 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.219067 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.220148 4835 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.220301 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.222194 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.223479 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.223965 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.226213 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.226891 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.227948 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.228665 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.229832 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.230260 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.231227 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.232354 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.232979 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.233828 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.234412 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.235486 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.236200 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.236700 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.237558 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.238006 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" 
path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.238873 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.239457 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.239904 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.329089 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.329150 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.329162 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"b6292a57d3db2226ee3ec001cadf39c3772bd60c935bf85ce3cf1c78760550e4"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.331112 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerStarted","Data":"df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.331138 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerStarted","Data":"d09e95ba4c7e8b3bca96c4857c5570ab97c7684a52a2c87a44673f253cb02fa9"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.332791 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jq8mv" event={"ID":"56088f67-2758-4fe2-b21a-fbf6f780c704","Type":"ContainerStarted","Data":"26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.334385 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8" exitCode=0 Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.334463 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.334504 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" 
event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"6a640515cf696d9d65af8d25e723f5274c228cc2a2b9cb7f959bd22d68c5b853"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.336232 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hzst6" event={"ID":"92da4528-a699-45b1-aed0-d49a382bf0a1","Type":"ContainerStarted","Data":"ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.336273 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hzst6" event={"ID":"92da4528-a699-45b1-aed0-d49a382bf0a1","Type":"ContainerStarted","Data":"b88bcec09a890818d392f74beceef8a12a5b1ba9559dd3bf954f627d7a55ba3d"} Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.343969 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.356194 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.370458 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.385668 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.407500 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.418130 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.436203 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.451248 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.463832 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.484973 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.505173 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T
16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\
",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.520320 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.534430 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.547569 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.561447 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.576383 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.594763 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.609051 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.631169 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.644210 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.661590 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.676723 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16
:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.690601 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.702825 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.718693 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.734453 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.756529 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z 
is after 2025-08-24T17:21:41Z" Feb 02 16:50:31 crc kubenswrapper[4835]: I0202 16:50:31.789202 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:31Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.139341 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 05:26:16.277816209 +0000 UTC Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.187779 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.187864 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.187897 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.188000 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.345187 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9"} Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.347545 4835 generic.go:334] "Generic (PLEG): container finished" podID="6345da6a-11cd-4a06-8586-5ea1c5a70bca" containerID="df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500" exitCode=0 Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.347584 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerDied","Data":"df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500"} Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.352038 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.352076 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.352086 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.352095 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.352103 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.352111 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.360717 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.373893 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.388448 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.400671 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.414378 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1e
b2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.427076 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.442651 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.464237 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.476206 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.489798 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.502396 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.521609 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"na
me\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.542624 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z 
is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.552005 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-2qphx"] Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.552343 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.553923 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.553939 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.554032 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.554557 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.556050 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.568329 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.578880 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.589881 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.592333 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/30d99635-cebd-43b3-83cf-954ee6c4f2de-serviceca\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.592369 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/30d99635-cebd-43b3-83cf-954ee6c4f2de-host\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.592410 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6w5n\" (UniqueName: \"kubernetes.io/projected/30d99635-cebd-43b3-83cf-954ee6c4f2de-kube-api-access-t6w5n\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.603069 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.614938 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.627139 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.637220 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.649823 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.661035 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.692653 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/30d99635-cebd-43b3-83cf-954ee6c4f2de-serviceca\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.692689 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/30d99635-cebd-43b3-83cf-954ee6c4f2de-host\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.692718 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6w5n\" (UniqueName: \"kubernetes.io/projected/30d99635-cebd-43b3-83cf-954ee6c4f2de-kube-api-access-t6w5n\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.692814 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/30d99635-cebd-43b3-83cf-954ee6c4f2de-host\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.693951 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/30d99635-cebd-43b3-83cf-954ee6c4f2de-serviceca\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.704477 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.728908 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6w5n\" (UniqueName: \"kubernetes.io/projected/30d99635-cebd-43b3-83cf-954ee6c4f2de-kube-api-access-t6w5n\") pod \"node-ca-2qphx\" (UID: \"30d99635-cebd-43b3-83cf-954ee6c4f2de\") " pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.768815 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z 
is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.793114 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.793256 4835 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.793332 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:36.79331609 +0000 UTC m=+28.414920190 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.800484 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.847903 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc
32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.870504 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-2qphx" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.887791 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.893599 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.893707 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.893735 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod 
\"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.893762 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.893883 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.893901 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.893915 4835 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.893960 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:36.893944475 +0000 UTC m=+28.515548555 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.894022 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:50:36.894012957 +0000 UTC m=+28.515617037 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.894073 4835 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.894100 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:36.894092619 +0000 UTC m=+28.515696699 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.894152 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.895578 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.895636 4835 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:32 crc kubenswrapper[4835]: E0202 16:50:32.895735 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:36.895708284 +0000 UTC m=+28.517312444 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:32 crc kubenswrapper[4835]: I0202 16:50:32.927049 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:32Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.140487 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 13:00:15.797294746 +0000 UTC Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.188872 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:33 crc kubenswrapper[4835]: E0202 16:50:33.189246 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.359318 4835 generic.go:334] "Generic (PLEG): container finished" podID="6345da6a-11cd-4a06-8586-5ea1c5a70bca" containerID="3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08" exitCode=0 Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.359397 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerDied","Data":"3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08"} Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.362261 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2qphx" event={"ID":"30d99635-cebd-43b3-83cf-954ee6c4f2de","Type":"ContainerStarted","Data":"fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d"} Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.362330 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2qphx" event={"ID":"30d99635-cebd-43b3-83cf-954ee6c4f2de","Type":"ContainerStarted","Data":"e6f94b9d35ac1b6291606e5df620c3cc1907ae9f06abd845390818bd6b0facf6"} Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.376796 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.391886 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.407605 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.417136 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.447103 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-
02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f01
79693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.459845 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.487541 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.507151 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.530155 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.552608 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z 
is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.565005 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.576677 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.589255 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.599908 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.609692 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.620500 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.630484 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.644470 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.690262 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z 
is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.719217 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.774191 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.811015 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.841587 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.882117 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.925303 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"na
me\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:33 crc kubenswrapper[4835]: I0202 16:50:33.965833 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:33Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.005630 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.041981 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.084007 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.121659 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.140952 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 02:43:45.316438978 +0000 UTC Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.188234 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.188246 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:34 crc kubenswrapper[4835]: E0202 16:50:34.188385 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:34 crc kubenswrapper[4835]: E0202 16:50:34.188457 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.367575 4835 generic.go:334] "Generic (PLEG): container finished" podID="6345da6a-11cd-4a06-8586-5ea1c5a70bca" containerID="5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8" exitCode=0 Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.367642 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerDied","Data":"5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8"} Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.372120 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.395020 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.414403 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.427109 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.440154 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.454999 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.467995 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.484365 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.506637 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b900922
72e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.520905 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.532886 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.564399 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.602057 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.609069 4835 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.611025 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.611086 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.611105 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.611266 4835 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.668561 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.674817 4835 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.675177 4835 kubelet_node_status.go:79] "Successfully registered node" node="crc" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.676936 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.676977 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.676990 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.677007 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.677019 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:34Z","lastTransitionTime":"2026-02-02T16:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:34 crc kubenswrapper[4835]: E0202 16:50:34.691790 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.695536 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.695600 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.695625 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.695653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.695675 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:34Z","lastTransitionTime":"2026-02-02T16:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:34 crc kubenswrapper[4835]: E0202 16:50:34.711831 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.715577 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.715611 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.715623 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.715638 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.715648 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:34Z","lastTransitionTime":"2026-02-02T16:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.721456 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: E0202 16:50:34.727382 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.735327 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.735396 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.735419 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.735433 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.735442 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:34Z","lastTransitionTime":"2026-02-02T16:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:34 crc kubenswrapper[4835]: E0202 16:50:34.751812 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.755226 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.755266 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.755310 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.755333 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.755353 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:34Z","lastTransitionTime":"2026-02-02T16:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.770314 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: E0202 16:50:34.776184 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:34Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:34 crc kubenswrapper[4835]: E0202 16:50:34.776422 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.778009 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.778044 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.778055 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.778071 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.778083 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:34Z","lastTransitionTime":"2026-02-02T16:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.893501 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.893553 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.893570 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.893592 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.893612 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:34Z","lastTransitionTime":"2026-02-02T16:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.995940 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.995982 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.995994 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.996010 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:34 crc kubenswrapper[4835]: I0202 16:50:34.996021 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:34Z","lastTransitionTime":"2026-02-02T16:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.098341 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.098368 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.098378 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.098390 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.098398 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.143355 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 20:48:49.100221582 +0000 UTC Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.187931 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:35 crc kubenswrapper[4835]: E0202 16:50:35.188137 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.200679 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.200737 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.200759 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.200785 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.200808 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.303227 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.303314 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.303327 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.303376 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.303391 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.377860 4835 generic.go:334] "Generic (PLEG): container finished" podID="6345da6a-11cd-4a06-8586-5ea1c5a70bca" containerID="cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299" exitCode=0 Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.377920 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerDied","Data":"cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.397067 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.405510 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.405605 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.405618 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.405658 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.405671 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.415567 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.434411 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.450987 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.468731 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.484551 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.503585 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.509595 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.509631 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.509643 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.509660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.509674 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.523469 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.540158 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.553653 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.567363 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.582092 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.601999 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z 
is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.611808 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.611874 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.611895 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.611919 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.612311 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.615390 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.628306 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:35Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.716130 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.716167 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.716178 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.716193 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.716204 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.819176 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.819251 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.819308 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.819338 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.819359 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.922660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.922713 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.922729 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.922753 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:35 crc kubenswrapper[4835]: I0202 16:50:35.922776 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:35Z","lastTransitionTime":"2026-02-02T16:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.026101 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.026158 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.026175 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.026199 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.026216 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.128714 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.128778 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.128800 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.128831 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.128852 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.143622 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 09:02:47.786782569 +0000 UTC Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.188008 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.188143 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.188586 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.188704 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.231934 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.232004 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.232020 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.232038 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.232049 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.334662 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.334702 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.334713 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.334739 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.334751 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.385396 4835 generic.go:334] "Generic (PLEG): container finished" podID="6345da6a-11cd-4a06-8586-5ea1c5a70bca" containerID="2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d" exitCode=0 Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.385467 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerDied","Data":"2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.396525 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.396984 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.397039 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.397063 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.410113 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.429034 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.436783 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.436879 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.437588 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.437623 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.437634 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.437651 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.437663 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.446791 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.462073 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.479068 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.489636 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.505532 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.522655 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.540613 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.540657 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.540670 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.540688 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.540700 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.541731 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.556964 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.571040 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.588311 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.608340 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z 
is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.620305 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.639203 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.643637 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.643673 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.643685 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.643701 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.643715 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.651711 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.662209 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.671400 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.683131 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.696620 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.718900 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.750426 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.750473 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.750484 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.750505 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.750518 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.763194 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.779735 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.801495 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.814424 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.829989 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee03
8a3ebdb8d6d9f8410a067098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.836664 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.836759 4835 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.836813 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:44.836798184 +0000 UTC m=+36.458402264 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.846151 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.852629 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.852683 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.852701 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.852725 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.852742 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.878775 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.894972 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.911670 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.937621 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.937730 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.937763 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.937829 4835 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:50:44.937801379 +0000 UTC m=+36.559405469 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.937872 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.937895 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.937904 4835 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.937913 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.937954 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:44.937943663 +0000 UTC m=+36.559547823 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.937907 4835 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.938013 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:44.938000874 +0000 UTC m=+36.559604954 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.938030 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.938047 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.938058 4835 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:36 crc kubenswrapper[4835]: E0202 16:50:36.938086 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 16:50:44.938077546 +0000 UTC m=+36.559681716 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.955020 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.955067 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.955082 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.955099 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.955110 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:36Z","lastTransitionTime":"2026-02-02T16:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:36 crc kubenswrapper[4835]: I0202 16:50:36.965636 4835 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.057870 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.057940 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.057963 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.057994 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.058017 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.144528 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 18:05:44.488594661 +0000 UTC Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.160332 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.160376 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.160391 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.160411 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.160426 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.188000 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:37 crc kubenswrapper[4835]: E0202 16:50:37.188141 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.263333 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.263408 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.263430 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.263453 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.263470 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.365874 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.365927 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.365949 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.365971 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.365987 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.417048 4835 generic.go:334] "Generic (PLEG): container finished" podID="6345da6a-11cd-4a06-8586-5ea1c5a70bca" containerID="944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017" exitCode=0 Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.417515 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerDied","Data":"944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.445038 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.463666 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.469845 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.469894 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.469912 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.469938 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.469954 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.476075 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.485350 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disab
led\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2
291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.499485 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.526419 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.541503 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.555085 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.567694 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.573393 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.573428 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.573457 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.573473 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.573483 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.578946 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.594645 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\
\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is 
after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.604290 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.619904 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.631431 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.642681 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.652862 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.664883 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.676534 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.676577 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.676588 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.676630 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.676642 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.679719 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.693538 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.704309 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.715102 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.726115 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1e
b2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.734232 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.746934 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.772236 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.778932 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.778990 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.779008 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.779030 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.779047 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.789987 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.806429 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.825785 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.843664 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.865202 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee03
8a3ebdb8d6d9f8410a067098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.875877 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:37Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.881709 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.881760 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.881769 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.881787 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.881795 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.984395 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.984634 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.984831 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.984974 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:37 crc kubenswrapper[4835]: I0202 16:50:37.985099 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:37Z","lastTransitionTime":"2026-02-02T16:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.057888 4835 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.087835 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.088145 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.088368 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.088569 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.088780 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.145495 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 19:46:04.693145529 +0000 UTC Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.188420 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:38 crc kubenswrapper[4835]: E0202 16:50:38.188568 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.188419 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:38 crc kubenswrapper[4835]: E0202 16:50:38.188786 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.191177 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.191223 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.191236 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.191256 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.191292 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.293003 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.293042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.293050 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.293066 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.293077 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.396666 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.396741 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.396767 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.396798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.396822 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.427333 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" event={"ID":"6345da6a-11cd-4a06-8586-5ea1c5a70bca","Type":"ContainerStarted","Data":"7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.445080 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.460660 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.477605 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.494431 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.499394 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.499450 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.499463 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.499481 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.499493 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.516786 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee03
8a3ebdb8d6d9f8410a067098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.533673 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.552178 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.573359 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.588501 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.601214 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.601251 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.601261 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.601297 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.601311 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.601957 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.618363 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.629208 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2
beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.637388 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.649452 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.662212 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:38Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.703680 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.703714 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.703721 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.703734 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.703742 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.805810 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.805843 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.805852 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.805866 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.805876 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.907891 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.907925 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.907935 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.907948 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:38 crc kubenswrapper[4835]: I0202 16:50:38.907956 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:38Z","lastTransitionTime":"2026-02-02T16:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.011613 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.011660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.011675 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.011694 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.011706 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.115058 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.115126 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.115144 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.115171 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.115191 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.146429 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 14:01:02.41035918 +0000 UTC Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.188217 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:39 crc kubenswrapper[4835]: E0202 16:50:39.188440 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.202970 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.218402 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.218445 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.218458 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.218509 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.218525 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.223468 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.236317 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.248267 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.258487 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.272712 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1e
b2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.283058 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.296477 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\
\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2
eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16
:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.318763 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},
{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\
\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn
/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: 
I0202 16:50:39.320812 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.321043 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.321055 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.321121 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.321141 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.328093 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.346258 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90
092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.365233 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.377453 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.398486 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.415783 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.423877 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.423913 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.423926 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.423942 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.423954 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.432312 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/0.log" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.435851 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098" exitCode=1 Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.435887 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.436819 4835 scope.go:117] "RemoveContainer" containerID="7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.449871 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.466768 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.485448 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.510723 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc7
51a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:39Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0202 16:50:39.147675 6069 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:39.147726 6069 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 16:50:39.147776 6069 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0202 16:50:39.147794 6069 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 16:50:39.147839 6069 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:39.147852 6069 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 16:50:39.147862 6069 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:39.147878 6069 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:39.147892 6069 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:39.147906 6069 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:39.147922 6069 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 16:50:39.148208 6069 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:39.148246 6069 factory.go:656] Stopping watch factory\\\\nI0202 16:50:39.148259 6069 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.527323 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.528189 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.528222 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.528237 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.528256 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.528290 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.553406 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.571430 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.585954 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.604703 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.618576 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.628515 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.631409 4835 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.631928 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.632113 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.632401 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.632609 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.640364 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.655044 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.666140 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.675921 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.734862 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.734902 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.734913 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.734927 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.734937 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.838726 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.838784 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.838801 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.838825 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.838841 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.942192 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.942251 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.942266 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.942338 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:39 crc kubenswrapper[4835]: I0202 16:50:39.942353 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:39Z","lastTransitionTime":"2026-02-02T16:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.044890 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.044928 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.044939 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.044953 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.044964 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.084525 4835 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.146810 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 06:05:07.499014675 +0000 UTC Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.147328 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.147357 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.147367 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.147381 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.147389 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.188111 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.188148 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:40 crc kubenswrapper[4835]: E0202 16:50:40.188286 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:40 crc kubenswrapper[4835]: E0202 16:50:40.188440 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.249204 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.249238 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.249247 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.249261 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.249286 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.351542 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.351585 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.351594 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.351606 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.351615 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.439600 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/1.log" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.440144 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/0.log" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.442415 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205" exitCode=1 Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.442453 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.442501 4835 scope.go:117] "RemoveContainer" containerID="7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.443344 4835 scope.go:117] "RemoveContainer" containerID="820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205" Feb 02 16:50:40 crc kubenswrapper[4835]: E0202 16:50:40.443491 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\"" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.454963 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.454996 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.455005 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.455020 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.455031 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.457752 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.469449 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.484838 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.496318 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.506329 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.518100 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.527393 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.539729 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.557435 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.557484 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc 
kubenswrapper[4835]: I0202 16:50:40.557499 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.557519 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.557532 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.567004 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.577844 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.589087 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.601300 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.617844 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.640634 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced
28f0da95bb56ddc3ddb9e205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:39Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0202 16:50:39.147675 6069 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:39.147726 6069 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 16:50:39.147776 6069 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0202 16:50:39.147794 6069 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 16:50:39.147839 6069 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:39.147852 6069 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 16:50:39.147862 6069 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:39.147878 6069 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:39.147892 6069 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:39.147906 6069 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:39.147922 6069 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 16:50:39.148208 6069 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:39.148246 6069 factory.go:656] Stopping watch factory\\\\nI0202 16:50:39.148259 6069 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:40Z\\\",\\\"message\\\":\\\"ved *v1.Pod event handler 3\\\\nI0202 16:50:40.402259 6233 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 16:50:40.402265 6233 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 16:50:40.402285 6233 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:40.402308 6233 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:40.402324 6233 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:40.402303 6233 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 16:50:40.402343 6233 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 16:50:40.402350 6233 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 16:50:40.402295 6233 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:40.402432 6233 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:40.402456 6233 factory.go:656] Stopping watch factory\\\\nI0202 16:50:40.402469 6233 handler.go:208] Removed *v1.NetworkPolicy event handler 
4\\\\nI0202 16:50:40.402310 6233 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:40.402505 6233 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.655810 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:40Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.659525 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.659567 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.659580 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.659601 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.659613 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.762626 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.762661 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.762673 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.762690 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.762704 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.865515 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.865558 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.865571 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.865587 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.865602 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.969078 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.969119 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.969132 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.969150 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:40 crc kubenswrapper[4835]: I0202 16:50:40.969165 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:40Z","lastTransitionTime":"2026-02-02T16:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.071988 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.072026 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.072041 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.072064 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.072079 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.147360 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 23:14:51.030124014 +0000 UTC Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.174743 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.174995 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.175158 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.175358 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.175521 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.188411 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:41 crc kubenswrapper[4835]: E0202 16:50:41.188580 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.278303 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.278354 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.278374 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.278399 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.278417 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.381086 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.381118 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.381126 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.381139 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.381147 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.446345 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/1.log" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.487904 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.487949 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.487961 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.487976 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.487993 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.591460 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.591532 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.591555 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.591584 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.591604 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.694370 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.694492 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.694524 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.694553 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.694574 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.798419 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.798482 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.798500 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.798526 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.798543 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.902460 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.902528 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.902555 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.902585 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:41 crc kubenswrapper[4835]: I0202 16:50:41.902606 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:41Z","lastTransitionTime":"2026-02-02T16:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.005463 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.005693 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.005800 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.005907 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.006004 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.109476 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.109529 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.109545 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.109567 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.109585 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.148734 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 02:57:41.787879237 +0000 UTC Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.186812 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5"] Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.187671 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.188384 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.188400 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:42 crc kubenswrapper[4835]: E0202 16:50:42.188543 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:42 crc kubenswrapper[4835]: E0202 16:50:42.188733 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.190462 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.190906 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.207523 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.212330 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.212375 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.212384 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.212398 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.212407 4835 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.225642 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.245526 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.263991 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.280697 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.294424 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.297860 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/88fd225f-00aa-432d-a669-8415fa06fa9e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.297933 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btklq\" (UniqueName: \"kubernetes.io/projected/88fd225f-00aa-432d-a669-8415fa06fa9e-kube-api-access-btklq\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.298022 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/88fd225f-00aa-432d-a669-8415fa06fa9e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.298067 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/88fd225f-00aa-432d-a669-8415fa06fa9e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.306217 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.314716 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.314765 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.314783 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.314805 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.314823 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.326391 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5b
d9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.347134 4835 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:
50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.365489 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.382070 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.398921 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/88fd225f-00aa-432d-a669-8415fa06fa9e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.398967 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/88fd225f-00aa-432d-a669-8415fa06fa9e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.399020 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/88fd225f-00aa-432d-a669-8415fa06fa9e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.399059 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-btklq\" (UniqueName: \"kubernetes.io/projected/88fd225f-00aa-432d-a669-8415fa06fa9e-kube-api-access-btklq\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.399673 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/88fd225f-00aa-432d-a669-8415fa06fa9e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.399989 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.400704 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/88fd225f-00aa-432d-a669-8415fa06fa9e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.404816 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/88fd225f-00aa-432d-a669-8415fa06fa9e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.415471 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.419179 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.419233 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.419247 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.419264 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.419301 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.444152 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btklq\" (UniqueName: \"kubernetes.io/projected/88fd225f-00aa-432d-a669-8415fa06fa9e-kube-api-access-btklq\") pod \"ovnkube-control-plane-749d76644c-67xl5\" (UID: \"88fd225f-00aa-432d-a669-8415fa06fa9e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.450746 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced
28f0da95bb56ddc3ddb9e205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:39Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0202 16:50:39.147675 6069 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:39.147726 6069 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 16:50:39.147776 6069 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0202 16:50:39.147794 6069 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 16:50:39.147839 6069 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:39.147852 6069 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 16:50:39.147862 6069 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:39.147878 6069 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:39.147892 6069 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:39.147906 6069 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:39.147922 6069 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 16:50:39.148208 6069 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:39.148246 6069 factory.go:656] Stopping watch factory\\\\nI0202 16:50:39.148259 6069 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:40Z\\\",\\\"message\\\":\\\"ved *v1.Pod event handler 3\\\\nI0202 16:50:40.402259 6233 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 16:50:40.402265 6233 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 16:50:40.402285 6233 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:40.402308 6233 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:40.402324 6233 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:40.402303 6233 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 16:50:40.402343 6233 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 16:50:40.402350 6233 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 16:50:40.402295 6233 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:40.402432 6233 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:40.402456 6233 factory.go:656] Stopping watch factory\\\\nI0202 16:50:40.402469 6233 handler.go:208] Removed *v1.NetworkPolicy event handler 
4\\\\nI0202 16:50:40.402310 6233 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:40.402505 6233 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.468167 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.486797 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:42Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.502166 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" Feb 02 16:50:42 crc kubenswrapper[4835]: W0202 16:50:42.514981 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88fd225f_00aa_432d_a669_8415fa06fa9e.slice/crio-5606170db3e8b9a36b061d71305ea82bd54d13564333bcd14748ad9ce148dfd0 WatchSource:0}: Error finding container 5606170db3e8b9a36b061d71305ea82bd54d13564333bcd14748ad9ce148dfd0: Status 404 returned error can't find the container with id 5606170db3e8b9a36b061d71305ea82bd54d13564333bcd14748ad9ce148dfd0 Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.522037 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.522083 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.522093 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.522111 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.522123 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.624484 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.624519 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.624531 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.624546 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.624556 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.726859 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.726910 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.726925 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.726944 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.726955 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.829478 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.829521 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.829532 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.829549 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.829560 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.931684 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.931978 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.931989 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.932008 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:42 crc kubenswrapper[4835]: I0202 16:50:42.932019 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:42Z","lastTransitionTime":"2026-02-02T16:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.034267 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.034348 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.034365 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.034389 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.034405 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.137036 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.137081 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.137089 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.137103 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.137113 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.149725 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 22:29:19.195644443 +0000 UTC Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.188382 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:43 crc kubenswrapper[4835]: E0202 16:50:43.188550 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.239612 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.239671 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.239690 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.239714 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.239733 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.342734 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.342780 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.342800 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.342830 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.342851 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.446496 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.446555 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.446574 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.446596 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.446612 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.458574 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" event={"ID":"88fd225f-00aa-432d-a669-8415fa06fa9e","Type":"ContainerStarted","Data":"f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.458646 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" event={"ID":"88fd225f-00aa-432d-a669-8415fa06fa9e","Type":"ContainerStarted","Data":"8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.458667 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" event={"ID":"88fd225f-00aa-432d-a669-8415fa06fa9e","Type":"ContainerStarted","Data":"5606170db3e8b9a36b061d71305ea82bd54d13564333bcd14748ad9ce148dfd0"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.475491 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.490469 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.510459 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.539067 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.549479 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.549513 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.549523 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.549541 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.549552 4835 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.553559 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.567591 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.582756 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.603589 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.618123 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.633528 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.645446 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-fbl8t"] Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.646033 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:43 crc kubenswrapper[4835]: E0202 16:50:43.646111 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.652058 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.652100 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.652112 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.652127 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.652138 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.654479 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:39Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0202 16:50:39.147675 6069 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:39.147726 6069 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 16:50:39.147776 6069 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0202 16:50:39.147794 6069 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 16:50:39.147839 6069 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:39.147852 6069 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 16:50:39.147862 6069 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:39.147878 6069 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:39.147892 6069 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:39.147906 6069 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:39.147922 6069 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 16:50:39.148208 6069 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:39.148246 6069 factory.go:656] Stopping watch factory\\\\nI0202 16:50:39.148259 6069 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:40Z\\\",\\\"message\\\":\\\"ved *v1.Pod event handler 3\\\\nI0202 16:50:40.402259 6233 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 16:50:40.402265 6233 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 16:50:40.402285 6233 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:40.402308 6233 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:40.402324 6233 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:40.402303 6233 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 16:50:40.402343 6233 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 16:50:40.402350 6233 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 16:50:40.402295 6233 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:40.402432 6233 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:40.402456 6233 factory.go:656] Stopping watch factory\\\\nI0202 16:50:40.402469 6233 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:40.402310 6233 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:40.402505 6233 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.664115 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.683440 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c23
54dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.702828 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.712903 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7th6j\" (UniqueName: \"kubernetes.io/projected/5f2e42e3-ff22-4273-9a65-d7e55792155e-kube-api-access-7th6j\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.712967 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.720610 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.740197 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.754739 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.754796 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.754813 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.754838 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.754855 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.757308 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.783542 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.803391 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.814519 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7th6j\" (UniqueName: \"kubernetes.io/projected/5f2e42e3-ff22-4273-9a65-d7e55792155e-kube-api-access-7th6j\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.814572 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:43 crc kubenswrapper[4835]: E0202 16:50:43.814725 4835 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:43 crc kubenswrapper[4835]: E0202 16:50:43.814810 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs podName:5f2e42e3-ff22-4273-9a65-d7e55792155e nodeName:}" failed. No retries permitted until 2026-02-02 16:50:44.314784469 +0000 UTC m=+35.936388549 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs") pod "network-metrics-daemon-fbl8t" (UID: "5f2e42e3-ff22-4273-9a65-d7e55792155e") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.822289 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.837209 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.838401 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7th6j\" (UniqueName: \"kubernetes.io/projected/5f2e42e3-ff22-4273-9a65-d7e55792155e-kube-api-access-7th6j\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.857253 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.857317 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.857330 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.857351 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.857364 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.859373 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.883212 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:39Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0202 16:50:39.147675 6069 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:39.147726 6069 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 16:50:39.147776 6069 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0202 16:50:39.147794 6069 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 16:50:39.147839 6069 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:39.147852 6069 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 16:50:39.147862 6069 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:39.147878 6069 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:39.147892 6069 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:39.147906 6069 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:39.147922 6069 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 16:50:39.148208 6069 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:39.148246 6069 factory.go:656] 
Stopping watch factory\\\\nI0202 16:50:39.148259 6069 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:40Z\\\",\\\"message\\\":\\\"ved *v1.Pod event handler 3\\\\nI0202 16:50:40.402259 6233 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 16:50:40.402265 6233 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 16:50:40.402285 6233 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:40.402308 6233 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:40.402324 6233 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:40.402303 6233 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 16:50:40.402343 6233 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 16:50:40.402350 6233 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 16:50:40.402295 6233 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:40.402432 6233 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:40.402456 6233 factory.go:656] Stopping watch factory\\\\nI0202 16:50:40.402469 6233 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:40.402310 6233 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:40.402505 6233 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.899729 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.916980 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.935491 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.955384 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.959799 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.959828 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.959838 4835 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.959853 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.959863 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:43Z","lastTransitionTime":"2026-02-02T16:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.969779 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:43 crc kubenswrapper[4835]: I0202 16:50:43.984132 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02
T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.001865 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:43Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.014494 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:44Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.032329 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:44Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.046021 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:44Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.063201 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.063246 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.063254 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.063290 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.063302 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.150352 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 05:46:23.312737683 +0000 UTC Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.165848 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.165895 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.165905 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.165919 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.165928 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.188722 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.188734 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.188940 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.189064 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.268584 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.268649 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.268665 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.268690 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.268707 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.321264 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.321547 4835 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.321707 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs podName:5f2e42e3-ff22-4273-9a65-d7e55792155e nodeName:}" failed. No retries permitted until 2026-02-02 16:50:45.321654637 +0000 UTC m=+36.943258757 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs") pod "network-metrics-daemon-fbl8t" (UID: "5f2e42e3-ff22-4273-9a65-d7e55792155e") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.370980 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.371016 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.371092 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.371111 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.371121 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.473645 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.473722 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.473750 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.473782 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.473810 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.577077 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.577123 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.577134 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.577153 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.577166 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.680402 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.680449 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.680459 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.680475 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.680487 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.783503 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.783558 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.783575 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.783599 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.783617 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.808531 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.808605 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.808627 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.808653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.808674 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.826680 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:44Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.831486 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.831538 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.831557 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.831580 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.831596 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.847571 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:44Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.851548 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.851599 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.851615 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.851636 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.851652 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.866587 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:44Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.871716 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.871789 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.872140 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.872178 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.872197 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.891539 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:44Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.895749 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.895780 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.895789 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.895804 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.895813 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.914422 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:44Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.914590 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.916740 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.916864 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.916938 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.917014 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.917088 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:44Z","lastTransitionTime":"2026-02-02T16:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:44 crc kubenswrapper[4835]: I0202 16:50:44.927493 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.927590 4835 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:44 crc kubenswrapper[4835]: E0202 16:50:44.927655 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:51:00.927638669 +0000 UTC m=+52.549242749 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.020433 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.020494 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.020513 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.020536 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.020553 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.028787 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.028938 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.028975 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:51:01.028940403 +0000 UTC m=+52.650544523 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029059 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029075 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029085 4835 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029128 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 16:51:01.029114078 +0000 UTC m=+52.650718158 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.029144 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.029165 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029222 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029239 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029247 4835 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029266 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 16:51:01.029259882 +0000 UTC m=+52.650863962 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029361 4835 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.029437 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-02-02 16:51:01.029421087 +0000 UTC m=+52.651025207 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.122604 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.122643 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.122655 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.122673 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.122685 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.151237 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 14:15:45.56913187 +0000 UTC Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.188621 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.188677 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.188744 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.188929 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.226405 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.226446 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.226457 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.226472 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.226484 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.329079 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.329123 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.329136 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.329156 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.329168 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.332562 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.332692 4835 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: E0202 16:50:45.332765 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs podName:5f2e42e3-ff22-4273-9a65-d7e55792155e nodeName:}" failed. No retries permitted until 2026-02-02 16:50:47.332745022 +0000 UTC m=+38.954349172 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs") pod "network-metrics-daemon-fbl8t" (UID: "5f2e42e3-ff22-4273-9a65-d7e55792155e") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.431692 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.431737 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.431748 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.431764 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.431776 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.534452 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.534512 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.534531 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.534556 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.534575 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.638088 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.638137 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.638148 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.638165 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.638178 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.740678 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.740726 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.740737 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.740755 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.740769 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.843785 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.843831 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.843839 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.843853 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.843866 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.946759 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.946822 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.946840 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.946864 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:45 crc kubenswrapper[4835]: I0202 16:50:45.946881 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:45Z","lastTransitionTime":"2026-02-02T16:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.050723 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.050796 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.050822 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.050852 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.050876 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.152121 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 05:39:50.684566348 +0000 UTC Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.154530 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.154586 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.154599 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.154619 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.154634 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.188067 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.188136 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:46 crc kubenswrapper[4835]: E0202 16:50:46.188266 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:46 crc kubenswrapper[4835]: E0202 16:50:46.188465 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.256647 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.256728 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.256778 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.256798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.256813 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.359246 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.359306 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.359315 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.359328 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.359338 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.462641 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.462682 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.462691 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.462706 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.462714 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.565120 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.565194 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.565235 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.565265 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.565346 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.668445 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.668601 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.668622 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.668650 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.668667 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.771735 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.771799 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.771819 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.771843 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.771862 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.874750 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.874851 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.874922 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.874948 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.874965 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.977148 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.977196 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.977214 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.977236 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:46 crc kubenswrapper[4835]: I0202 16:50:46.977252 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:46Z","lastTransitionTime":"2026-02-02T16:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.079676 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.079739 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.079757 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.079780 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.079797 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.153319 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 23:08:23.883441363 +0000 UTC Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.182701 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.182750 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.182764 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.182781 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.182792 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.187891 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:47 crc kubenswrapper[4835]: E0202 16:50:47.188065 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.187906 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:47 crc kubenswrapper[4835]: E0202 16:50:47.188198 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.286000 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.286109 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.286132 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.286156 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.286174 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.351430 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:47 crc kubenswrapper[4835]: E0202 16:50:47.351660 4835 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:47 crc kubenswrapper[4835]: E0202 16:50:47.351759 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs podName:5f2e42e3-ff22-4273-9a65-d7e55792155e nodeName:}" failed. No retries permitted until 2026-02-02 16:50:51.351727943 +0000 UTC m=+42.973332063 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs") pod "network-metrics-daemon-fbl8t" (UID: "5f2e42e3-ff22-4273-9a65-d7e55792155e") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.389256 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.389318 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.389327 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.389344 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.389354 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.492116 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.492189 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.492211 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.492243 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.492266 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.594954 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.595022 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.595041 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.595067 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.595085 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.698542 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.698591 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.698608 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.698632 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.698649 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.801581 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.801649 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.801662 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.801687 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.801701 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.904416 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.904478 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.904529 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.904826 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:47 crc kubenswrapper[4835]: I0202 16:50:47.904924 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:47Z","lastTransitionTime":"2026-02-02T16:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.008192 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.008260 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.008321 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.008356 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.008380 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.111081 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.111148 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.111161 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.111226 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.111237 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.154159 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 20:40:59.628830342 +0000 UTC Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.188788 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.188871 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:48 crc kubenswrapper[4835]: E0202 16:50:48.188929 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:48 crc kubenswrapper[4835]: E0202 16:50:48.188983 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.213920 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.213953 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.213962 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.213991 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.214002 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.316440 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.316493 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.316504 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.316521 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.316533 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.420310 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.420355 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.420366 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.420392 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.420405 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.523434 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.523467 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.523477 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.523510 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.523522 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.626418 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.626503 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.626534 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.626565 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.626586 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.729154 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.729197 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.729209 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.729222 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.729233 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.831862 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.831911 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.831922 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.831940 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.831954 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.935700 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.935774 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.935792 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.935820 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:48 crc kubenswrapper[4835]: I0202 16:50:48.935838 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:48Z","lastTransitionTime":"2026-02-02T16:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.038209 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.038327 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.038351 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.038380 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.038406 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.141001 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.141030 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.141038 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.141050 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.141058 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.154500 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 18:57:16.200798618 +0000 UTC Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.188491 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.188496 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:49 crc kubenswrapper[4835]: E0202 16:50:49.188721 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:49 crc kubenswrapper[4835]: E0202 16:50:49.189244 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.205458 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.223927 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.236618 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.243572 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.243610 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.243622 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.243638 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.243650 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.250228 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.264203 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:
42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.283053 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92ed
af5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.297232 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.312345 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.330171 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.340723 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.346683 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.346738 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.346753 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.346786 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.346849 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.361946 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.374702 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.386175 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.398024 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.412528 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.431402 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced
28f0da95bb56ddc3ddb9e205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7103e54873e113251c13656a624b3a0d8471ee038a3ebdb8d6d9f8410a067098\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:39Z\\\",\\\"message\\\":\\\" k8s.io/client-go/informers/factory.go:160\\\\nI0202 16:50:39.147675 6069 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:39.147726 6069 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 16:50:39.147776 6069 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0202 16:50:39.147794 6069 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 16:50:39.147839 6069 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:39.147852 6069 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 16:50:39.147862 6069 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:39.147878 6069 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:39.147892 6069 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:39.147906 6069 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:39.147922 6069 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 16:50:39.148208 6069 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:39.148246 6069 factory.go:656] Stopping watch factory\\\\nI0202 16:50:39.148259 6069 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:40Z\\\",\\\"message\\\":\\\"ved *v1.Pod event handler 3\\\\nI0202 16:50:40.402259 6233 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 16:50:40.402265 6233 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 16:50:40.402285 6233 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:40.402308 6233 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:40.402324 6233 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:40.402303 6233 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 16:50:40.402343 6233 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 16:50:40.402350 6233 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 16:50:40.402295 6233 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:40.402432 6233 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:40.402456 6233 factory.go:656] Stopping watch factory\\\\nI0202 16:50:40.402469 6233 handler.go:208] Removed *v1.NetworkPolicy event handler 
4\\\\nI0202 16:50:40.402310 6233 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:40.402505 6233 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.445036 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.449660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.449714 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.449729 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.449747 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.449760 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.552123 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.552174 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.552184 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.552199 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.552209 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.655190 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.655256 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.655292 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.655315 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.655331 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.757847 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.757896 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.757906 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.757919 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.757931 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.860944 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.860986 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.860997 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.861011 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.861024 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.963965 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.964046 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.964071 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.964101 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:49 crc kubenswrapper[4835]: I0202 16:50:49.964122 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:49Z","lastTransitionTime":"2026-02-02T16:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.067455 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.067517 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.067535 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.067559 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.067582 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.155471 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 08:33:08.561893243 +0000 UTC Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.170094 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.170157 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.170170 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.170187 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.170197 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.187837 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.187935 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:50 crc kubenswrapper[4835]: E0202 16:50:50.188006 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:50 crc kubenswrapper[4835]: E0202 16:50:50.188168 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.272927 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.272967 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.273070 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.273090 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.273102 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.375422 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.375484 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.375503 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.375524 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.375553 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.478671 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.478743 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.478768 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.478798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.478819 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.582616 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.582686 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.582711 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.582755 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.582779 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.684984 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.685053 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.685066 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.685081 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.685092 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.788380 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.788473 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.788492 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.788514 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.788531 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.891438 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.891510 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.891542 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.891575 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.891597 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.994452 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.994525 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.994549 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.994577 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:50 crc kubenswrapper[4835]: I0202 16:50:50.994598 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:50Z","lastTransitionTime":"2026-02-02T16:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.097444 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.097514 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.097539 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.097565 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.097581 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.156197 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 23:13:33.453444531 +0000 UTC Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.188972 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.188973 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:51 crc kubenswrapper[4835]: E0202 16:50:51.189206 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:51 crc kubenswrapper[4835]: E0202 16:50:51.189266 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.200178 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.200230 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.200247 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.200293 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.200312 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.303067 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.303120 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.303137 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.303160 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.303178 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.394778 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:51 crc kubenswrapper[4835]: E0202 16:50:51.395004 4835 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:51 crc kubenswrapper[4835]: E0202 16:50:51.395098 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs podName:5f2e42e3-ff22-4273-9a65-d7e55792155e nodeName:}" failed. No retries permitted until 2026-02-02 16:50:59.395068752 +0000 UTC m=+51.016672872 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs") pod "network-metrics-daemon-fbl8t" (UID: "5f2e42e3-ff22-4273-9a65-d7e55792155e") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.405924 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.406148 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.406183 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.406211 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.406232 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.508492 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.508574 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.508587 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.508605 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.508619 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.611220 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.611253 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.611262 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.611293 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.611303 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.714389 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.714488 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.714508 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.714535 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.714577 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.816961 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.817020 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.817033 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.817052 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.817063 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.919381 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.919459 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.919472 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.919490 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:51 crc kubenswrapper[4835]: I0202 16:50:51.919502 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:51Z","lastTransitionTime":"2026-02-02T16:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.022737 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.022789 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.022804 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.022823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.022837 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.125755 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.125819 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.125834 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.125860 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.125875 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.157359 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 21:40:31.969462457 +0000 UTC Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.187989 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.188077 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:52 crc kubenswrapper[4835]: E0202 16:50:52.188115 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:52 crc kubenswrapper[4835]: E0202 16:50:52.188218 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.228504 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.228533 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.228541 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.228554 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.228563 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.330932 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.330971 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.330981 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.330995 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.331008 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.433526 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.433577 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.433595 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.433617 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.433636 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.536042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.536373 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.536545 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.536609 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.536633 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.639649 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.639703 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.639714 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.639735 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.639750 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.743109 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.743185 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.743208 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.743237 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.743258 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.846480 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.846555 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.846580 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.846611 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.846636 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.949206 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.949291 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.949307 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.949333 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:52 crc kubenswrapper[4835]: I0202 16:50:52.949350 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:52Z","lastTransitionTime":"2026-02-02T16:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.052607 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.052651 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.052661 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.052678 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.052688 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.156412 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.156477 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.156495 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.156526 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.156544 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.157739 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 13:01:22.379224551 +0000 UTC Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.188310 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.188418 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:53 crc kubenswrapper[4835]: E0202 16:50:53.188521 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:53 crc kubenswrapper[4835]: E0202 16:50:53.188706 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.259806 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.259889 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.259911 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.259941 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.259964 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.363557 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.363643 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.363670 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.363700 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.363723 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.466469 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.466516 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.466547 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.466563 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.466572 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.568866 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.568901 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.568911 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.568923 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.568931 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.672812 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.672863 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.672873 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.672893 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.672905 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.775913 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.775965 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.775976 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.775994 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.776006 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.879367 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.879437 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.879456 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.879482 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.879500 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.983087 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.983136 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.983148 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.983164 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:53 crc kubenswrapper[4835]: I0202 16:50:53.983176 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:53Z","lastTransitionTime":"2026-02-02T16:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.086489 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.086562 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.086579 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.086602 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.086620 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.158239 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 00:29:52.953777786 +0000 UTC Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.187849 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.187888 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:54 crc kubenswrapper[4835]: E0202 16:50:54.188127 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:54 crc kubenswrapper[4835]: E0202 16:50:54.188183 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.189690 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.189726 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.189734 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.189747 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.189759 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.291847 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.291906 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.291918 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.291932 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.291946 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.395026 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.395058 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.395080 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.395093 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.395104 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.498550 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.498986 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.498999 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.499024 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.499037 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.600867 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.600921 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.600936 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.600950 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.600963 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.702849 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.702893 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.702904 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.702920 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.702929 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.806014 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.806058 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.806076 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.806098 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.806116 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.908790 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.908854 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.908874 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.908903 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:54 crc kubenswrapper[4835]: I0202 16:50:54.908925 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:54Z","lastTransitionTime":"2026-02-02T16:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.011254 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.011310 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.011321 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.011343 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.011395 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.026796 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.026888 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.026910 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.026931 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.026948 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: E0202 16:50:55.042746 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.046597 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.046631 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.046641 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.046657 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.046667 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: E0202 16:50:55.062374 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.066444 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.066510 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.066534 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.066559 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.066576 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: E0202 16:50:55.083523 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.087135 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.087177 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.087188 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.087203 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.087254 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: E0202 16:50:55.101902 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.106021 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.106091 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.106114 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.106137 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.106158 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: E0202 16:50:55.126906 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: E0202 16:50:55.127213 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.128951 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.128998 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.129020 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.129050 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.129073 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.160427 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 13:03:30.486394404 +0000 UTC Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.188695 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.188972 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:55 crc kubenswrapper[4835]: E0202 16:50:55.189150 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:55 crc kubenswrapper[4835]: E0202 16:50:55.189222 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.189266 4835 scope.go:117] "RemoveContainer" containerID="820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.200914 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.213644 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.226397 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:
42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.231377 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.231413 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.231425 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.231443 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.231455 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.247184 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.262784 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.273089 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.285163 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.294018 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.313512 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.325112 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.334785 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.334824 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.334835 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.334853 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 
16:50:55.334865 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.337233 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\
\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.355101 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced
28f0da95bb56ddc3ddb9e205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:40Z\\\",\\\"message\\\":\\\"ved *v1.Pod event handler 3\\\\nI0202 16:50:40.402259 6233 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 16:50:40.402265 6233 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 16:50:40.402285 6233 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:40.402308 6233 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:40.402324 6233 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:40.402303 6233 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 16:50:40.402343 6233 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 16:50:40.402350 6233 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 16:50:40.402295 6233 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:40.402432 6233 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:40.402456 6233 factory.go:656] Stopping watch factory\\\\nI0202 16:50:40.402469 6233 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:40.402310 6233 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:40.402505 6233 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.366203 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.384110 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4
f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.394515 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.403837 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.413216 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.436923 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.436968 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.436976 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.436990 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.436998 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.508850 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/1.log" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.511647 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.512117 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.524505 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.535145 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.538832 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.538873 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.538885 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.538902 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.538914 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.546242 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.559102 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.580517 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc7
51a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:40Z\\\",\\\"message\\\":\\\"ved *v1.Pod event handler 3\\\\nI0202 16:50:40.402259 6233 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 16:50:40.402265 6233 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 16:50:40.402285 6233 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:40.402308 6233 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:40.402324 6233 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:40.402303 6233 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 16:50:40.402343 6233 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 16:50:40.402350 6233 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 16:50:40.402295 6233 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:40.402432 6233 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:40.402456 6233 factory.go:656] Stopping watch factory\\\\nI0202 16:50:40.402469 6233 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:40.402310 6233 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:40.402505 6233 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.590154 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.620877 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.635464 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.640972 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.641003 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.641015 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.641030 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.641041 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.652903 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.667475 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.680254 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.696542 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.716963 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.727089 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.738964 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.742765 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.742812 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc 
kubenswrapper[4835]: I0202 16:50:55.742823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.742837 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.742845 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.747923 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.758161 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:55Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.845739 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.845777 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.845787 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.845802 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.845811 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.947909 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.947955 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.947965 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.947981 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:55 crc kubenswrapper[4835]: I0202 16:50:55.947996 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:55Z","lastTransitionTime":"2026-02-02T16:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.050426 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.050465 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.050477 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.050496 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.050507 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.153444 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.153519 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.153956 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.154020 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.154040 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.161161 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 23:39:52.297314569 +0000 UTC Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.187889 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.187900 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:56 crc kubenswrapper[4835]: E0202 16:50:56.188046 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:56 crc kubenswrapper[4835]: E0202 16:50:56.188162 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.257262 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.257357 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.257370 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.257387 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.257397 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.360096 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.360146 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.360160 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.360180 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.360195 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.463036 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.463100 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.463125 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.463157 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.463181 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.516142 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/2.log" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.517219 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/1.log" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.520672 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070" exitCode=1 Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.520713 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.520750 4835 scope.go:117] "RemoveContainer" containerID="820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.521478 4835 scope.go:117] "RemoveContainer" containerID="d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070" Feb 02 16:50:56 crc kubenswrapper[4835]: E0202 16:50:56.521642 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\"" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.542979 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.561854 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.565398 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.565481 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.565505 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.565536 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.565560 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.583003 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.600788 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.635387 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820bbf2a12c832fbb4d84be59b344b3a4ed18ced28f0da95bb56ddc3ddb9e205\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:40Z\\\",\\\"message\\\":\\\"ved *v1.Pod event handler 3\\\\nI0202 16:50:40.402259 6233 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 16:50:40.402265 6233 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 16:50:40.402285 6233 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 16:50:40.402308 6233 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 16:50:40.402324 6233 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 16:50:40.402303 6233 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 16:50:40.402343 6233 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0202 16:50:40.402350 6233 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 16:50:40.402295 6233 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 16:50:40.402432 6233 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 16:50:40.402456 6233 factory.go:656] Stopping watch factory\\\\nI0202 16:50:40.402469 6233 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 16:50:40.402310 6233 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 16:50:40.402505 6233 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 
16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.649389 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.668069 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.668116 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.668128 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.668146 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.668157 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.672428 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.688824 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.704678 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.719801 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.739467 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.755241 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.770703 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.770746 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.770756 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.770782 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.770795 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.773187 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.790702 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.806954 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.821737 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.835917 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:56Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.873918 4835 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.873943 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.873951 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.873965 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.873974 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.976358 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.976402 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.976419 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.976435 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:56 crc kubenswrapper[4835]: I0202 16:50:56.976449 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:56Z","lastTransitionTime":"2026-02-02T16:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.079095 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.079133 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.079143 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.079206 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.079218 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.161956 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 03:31:09.797966632 +0000 UTC Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.182164 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.182243 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.182325 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.182371 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.182396 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.188776 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.188802 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:57 crc kubenswrapper[4835]: E0202 16:50:57.188952 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:57 crc kubenswrapper[4835]: E0202 16:50:57.189024 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.285345 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.285416 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.285437 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.285463 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.285481 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.387794 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.387841 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.387860 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.387884 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.387902 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.491316 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.491395 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.491426 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.491454 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.491476 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.527524 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/2.log" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.538890 4835 scope.go:117] "RemoveContainer" containerID="d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070" Feb 02 16:50:57 crc kubenswrapper[4835]: E0202 16:50:57.539544 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\"" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.555550 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.588173 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.594977 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.595042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.595059 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.595082 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.595099 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.610096 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.629404 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.646391 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.666786 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.697742 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.697841 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.697863 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.697928 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.697951 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.702624 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.722966 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.742192 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.763151 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.783057 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.801240 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.801297 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.801309 4835 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.801326 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.801338 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.801707 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.817140 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02
T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.831866 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.847069 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.870052 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.881725 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:57Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.903608 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.903679 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.903694 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.903711 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:57 crc kubenswrapper[4835]: I0202 16:50:57.903722 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:57Z","lastTransitionTime":"2026-02-02T16:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.006028 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.006081 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.006091 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.006106 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.006118 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.108962 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.109049 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.109075 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.109108 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.109151 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.163077 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 05:28:57.496284873 +0000 UTC Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.188638 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.188638 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:50:58 crc kubenswrapper[4835]: E0202 16:50:58.188805 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:50:58 crc kubenswrapper[4835]: E0202 16:50:58.188870 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.212161 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.212236 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.212296 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.212331 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.212348 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.315983 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.316075 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.316087 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.316107 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.316119 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.419452 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.419500 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.419514 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.419530 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.419541 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.523563 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.523611 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.523623 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.523639 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.523652 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.626838 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.626897 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.626920 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.626952 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.626976 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.729693 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.729755 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.729772 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.729798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.729820 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.832617 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.832713 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.832732 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.832762 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.832782 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.936232 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.936321 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.936340 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.936364 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:58 crc kubenswrapper[4835]: I0202 16:50:58.936382 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:58Z","lastTransitionTime":"2026-02-02T16:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.039343 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.039389 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.039407 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.039431 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.039451 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.142665 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.142727 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.142744 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.142768 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.142784 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.163455 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 13:03:25.683386874 +0000 UTC Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.188154 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.188257 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:59 crc kubenswrapper[4835]: E0202 16:50:59.188442 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:50:59 crc kubenswrapper[4835]: E0202 16:50:59.188586 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.212231 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.234251 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.245823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.245874 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.245887 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.245910 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.245924 4835 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.256539 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.277691 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.303068 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.321038 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.341586 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mount
Path\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.349226 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.349329 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.349357 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.349386 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.349410 4835 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.357967 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.380614 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.397381 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.414785 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.450941 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.453166 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.453247 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.453304 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.453342 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.453369 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.474122 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.487485 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:50:59 crc kubenswrapper[4835]: E0202 16:50:59.487739 4835 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:59 crc kubenswrapper[4835]: E0202 16:50:59.487847 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs podName:5f2e42e3-ff22-4273-9a65-d7e55792155e nodeName:}" failed. No retries permitted until 2026-02-02 16:51:15.487819981 +0000 UTC m=+67.109424101 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs") pod "network-metrics-daemon-fbl8t" (UID: "5f2e42e3-ff22-4273-9a65-d7e55792155e") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.494481 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.510027 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready 
status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.525393 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\
\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.550825 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05a
a9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:50:59Z is after 2025-08-24T17:21:41Z" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.555390 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.555443 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.555458 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.555480 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.555495 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.658509 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.658600 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.658620 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.658645 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.658662 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.761567 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.761644 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.761671 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.761700 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.761721 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.869123 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.869205 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.869264 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.869334 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.869358 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.971613 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.971646 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.971654 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.971668 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:50:59 crc kubenswrapper[4835]: I0202 16:50:59.971677 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:50:59Z","lastTransitionTime":"2026-02-02T16:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.073681 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.073724 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.073733 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.073748 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.073760 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.163929 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 07:30:59.978695667 +0000 UTC Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.176088 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.176141 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.176161 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.176185 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.176203 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.188499 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.188582 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:00 crc kubenswrapper[4835]: E0202 16:51:00.188663 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:00 crc kubenswrapper[4835]: E0202 16:51:00.188810 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.278821 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.278852 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.278862 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.278875 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.278884 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.382003 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.382048 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.382061 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.382077 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.382088 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.484758 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.485077 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.485336 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.485566 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.485801 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.589320 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.589794 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.589965 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.590134 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.590320 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.693712 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.693742 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.693752 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.693768 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.693780 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.796897 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.796955 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.796972 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.796992 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.797011 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.899865 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.899913 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.899929 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.899951 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:00 crc kubenswrapper[4835]: I0202 16:51:00.899969 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:00Z","lastTransitionTime":"2026-02-02T16:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.002674 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.002729 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.002747 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.002769 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.002785 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.002931 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.003075 4835 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.003158 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:51:33.003140793 +0000 UTC m=+84.624744873 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.104553 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.104704 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.104752 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.104795 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.104927 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:51:33.104873825 +0000 UTC m=+84.726477945 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.104965 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.104993 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.105014 4835 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.105083 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 16:51:33.105061901 +0000 UTC m=+84.726666021 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.105166 4835 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.105218 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:51:33.105204535 +0000 UTC m=+84.726808645 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.105350 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.105394 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.105423 4835 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.105502 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 16:51:33.105480093 +0000 UTC m=+84.727084323 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.106512 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.106562 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.106581 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.106603 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.106621 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.164761 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 16:42:48.158565334 +0000 UTC Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.188867 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.189062 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.189180 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:01 crc kubenswrapper[4835]: E0202 16:51:01.189407 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.208724 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.208784 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.208809 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.208837 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.208860 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.311916 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.311958 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.311972 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.311989 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.312002 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.415638 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.416013 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.416026 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.416043 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.416056 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.518683 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.518709 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.518717 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.518729 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.518737 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.622151 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.622194 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.622205 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.622224 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.622237 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.724909 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.724951 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.724963 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.724980 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.724992 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.827001 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.827047 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.827061 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.827078 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.827089 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.929763 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.929807 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.929819 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.929836 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:01 crc kubenswrapper[4835]: I0202 16:51:01.929847 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:01Z","lastTransitionTime":"2026-02-02T16:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.032114 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.032170 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.032181 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.032194 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.032206 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.134779 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.134812 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.134823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.134838 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.134848 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.165494 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 08:19:23.005755286 +0000 UTC Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.188857 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.188931 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:02 crc kubenswrapper[4835]: E0202 16:51:02.189207 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:02 crc kubenswrapper[4835]: E0202 16:51:02.189338 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.237163 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.237190 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.237199 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.237211 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.237222 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.339414 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.339452 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.339466 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.339481 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.339493 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.446479 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.446535 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.446554 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.446577 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.446594 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.550111 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.550163 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.550181 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.550203 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.550220 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.653601 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.653712 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.653732 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.653755 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.653774 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.755657 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.755698 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.755709 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.755724 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.755735 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.858570 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.858642 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.858666 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.858696 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.858717 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.962164 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.962235 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.962258 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.962323 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:02 crc kubenswrapper[4835]: I0202 16:51:02.962348 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:02Z","lastTransitionTime":"2026-02-02T16:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.066069 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.066110 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.066119 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.066135 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.066145 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.166047 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 08:43:02.072364998 +0000 UTC Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.169432 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.169470 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.169481 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.169497 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.169507 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.188085 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:03 crc kubenswrapper[4835]: E0202 16:51:03.188264 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.188329 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:03 crc kubenswrapper[4835]: E0202 16:51:03.188568 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.272572 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.272635 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.272652 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.272676 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.272696 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.374712 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.374771 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.374785 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.374802 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.374811 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.477016 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.477077 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.477100 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.477130 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.477153 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.580337 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.580416 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.580440 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.580473 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.580498 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.682902 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.682936 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.682946 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.682975 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.682985 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.785953 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.785997 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.786010 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.786028 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.786044 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.888607 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.888650 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.888668 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.888690 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.888707 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.991225 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.991376 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.991398 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.991422 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:03 crc kubenswrapper[4835]: I0202 16:51:03.991439 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:03Z","lastTransitionTime":"2026-02-02T16:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.095098 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.095153 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.095172 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.095197 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.095217 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.166751 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 06:14:21.901051137 +0000 UTC Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.188320 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.188377 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:04 crc kubenswrapper[4835]: E0202 16:51:04.188455 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:04 crc kubenswrapper[4835]: E0202 16:51:04.188564 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.198787 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.198834 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.198846 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.198862 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.198875 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.302437 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.302754 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.302898 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.303051 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.303198 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.406335 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.406395 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.406411 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.406434 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.406451 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.509509 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.509774 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.509946 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.510138 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.510596 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.613852 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.614203 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.614441 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.614693 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.614872 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.718477 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.718545 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.718557 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.718574 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.718586 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.822054 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.822918 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.822964 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.822991 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.823009 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.889936 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.901783 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.912460 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:04Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.925904 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.925940 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.925951 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.925968 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.925980 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:04Z","lastTransitionTime":"2026-02-02T16:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.931330 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:04Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.952243 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:04Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.965980 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:04Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.978758 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:04Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:04 crc kubenswrapper[4835]: I0202 16:51:04.994696 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:04Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.006587 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.025138 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.029343 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.029402 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc 
kubenswrapper[4835]: I0202 16:51:05.029421 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.029445 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.029463 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.039823 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.054016 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.073250 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.088663 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.104169 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.122957 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.131601 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.131643 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.131654 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.131675 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.131686 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.145296 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.158821 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.167803 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 15:25:34.195294749 +0000 UTC Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.176450 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\
\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.188624 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.188665 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:05 crc kubenswrapper[4835]: E0202 16:51:05.188767 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:05 crc kubenswrapper[4835]: E0202 16:51:05.188910 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.234016 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.234094 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.234125 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.234150 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.234168 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.338479 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.338539 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.338551 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.338569 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.338581 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.388289 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.388342 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.388356 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.388375 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.388387 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: E0202 16:51:05.399359 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.402855 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.402915 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.402932 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.402957 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.402976 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: E0202 16:51:05.420157 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.424046 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.424087 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.424103 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.424127 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.424144 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: E0202 16:51:05.439574 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.443802 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.443834 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.443843 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.443858 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.443868 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: E0202 16:51:05.458577 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.461918 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.462053 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.462146 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.462239 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.462382 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: E0202 16:51:05.473686 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:05Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:05 crc kubenswrapper[4835]: E0202 16:51:05.473796 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.475492 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.475607 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.475686 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.475773 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.475847 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.578656 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.578737 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.578763 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.578794 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.578821 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.681999 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.682073 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.682095 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.682134 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.682173 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.785726 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.785784 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.785803 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.785825 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.785842 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.888534 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.888576 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.888587 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.888603 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.888614 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.991939 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.991994 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.992011 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.992034 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:05 crc kubenswrapper[4835]: I0202 16:51:05.992051 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:05Z","lastTransitionTime":"2026-02-02T16:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.095570 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.095638 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.095650 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.095664 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.095674 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.168836 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 17:23:34.075884644 +0000 UTC Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.188235 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.188318 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:06 crc kubenswrapper[4835]: E0202 16:51:06.188411 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:06 crc kubenswrapper[4835]: E0202 16:51:06.188563 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.198594 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.198669 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.198681 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.198696 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.198708 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.302160 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.302215 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.302233 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.302256 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.302306 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.405797 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.405860 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.405876 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.405900 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.405918 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.508928 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.508974 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.508985 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.509001 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.509012 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.611591 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.611661 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.611682 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.611710 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.611728 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.715098 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.715159 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.715175 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.715199 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.715218 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.817641 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.817705 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.817721 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.817752 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.817769 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.919978 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.920025 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.920038 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.920052 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:06 crc kubenswrapper[4835]: I0202 16:51:06.920064 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:06Z","lastTransitionTime":"2026-02-02T16:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.022968 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.023074 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.023095 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.023120 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.023138 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.125835 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.125899 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.125924 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.125953 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.125973 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.169613 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 00:27:11.080295317 +0000 UTC Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.187952 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:07 crc kubenswrapper[4835]: E0202 16:51:07.188070 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.187959 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:07 crc kubenswrapper[4835]: E0202 16:51:07.188264 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.228814 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.228884 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.228904 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.229328 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.229375 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.332619 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.332690 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.332715 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.332761 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.332785 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.436056 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.436129 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.436150 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.436175 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.436192 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.539034 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.539102 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.539121 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.539145 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.539163 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.643212 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.643302 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.643314 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.643339 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.643355 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.746316 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.746393 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.746454 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.746518 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.746545 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.849705 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.849762 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.849778 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.849801 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.849842 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.952744 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.952804 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.952821 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.952843 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:07 crc kubenswrapper[4835]: I0202 16:51:07.952860 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:07Z","lastTransitionTime":"2026-02-02T16:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.055134 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.055171 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.055187 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.055207 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.055220 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.157630 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.157691 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.157711 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.157738 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.157758 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.170441 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 05:07:27.904707763 +0000 UTC Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.188050 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.188080 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:08 crc kubenswrapper[4835]: E0202 16:51:08.188318 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:08 crc kubenswrapper[4835]: E0202 16:51:08.188440 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.260827 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.260890 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.260907 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.260932 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.260955 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.365248 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.365654 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.365798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.365943 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.366073 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.468744 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.468793 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.468805 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.468820 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.468831 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.572331 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.572386 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.572396 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.572409 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.572418 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.674760 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.674818 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.674835 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.674858 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.674875 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.778021 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.778102 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.778134 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.778163 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.778185 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.880957 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.881036 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.881058 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.881086 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.881109 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.983998 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.984057 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.984069 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.984086 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:08 crc kubenswrapper[4835]: I0202 16:51:08.984097 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:08Z","lastTransitionTime":"2026-02-02T16:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.086302 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.086363 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.086382 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.086406 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.086427 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.171611 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 01:51:09.722449611 +0000 UTC Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.188878 4835 scope.go:117] "RemoveContainer" containerID="d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070" Feb 02 16:51:09 crc kubenswrapper[4835]: E0202 16:51:09.189033 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\"" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.189206 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.189239 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:09 crc kubenswrapper[4835]: E0202 16:51:09.189326 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:09 crc kubenswrapper[4835]: E0202 16:51:09.189379 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.190194 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.190227 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.190237 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.190251 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.190259 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.204242 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.220738 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\
\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.241370 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.255346 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.268488 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.292234 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.292290 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.292304 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.292322 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.292334 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.321553 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.334519 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.342829 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.359330 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.372167 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 
16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.388060 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.408341 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\"
:\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.410543 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.410607 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.410626 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.410648 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.410663 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.424073 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.448173 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.465823 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.479314 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.497372 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.513431 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.513501 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.513515 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.513830 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.513866 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.520149 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:09Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.617907 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.617989 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.618014 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.618044 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.618067 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.721015 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.721084 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.721108 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.721141 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.721176 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.824457 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.824538 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.824562 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.824587 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.824606 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.927983 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.928252 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.928318 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.928363 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:09 crc kubenswrapper[4835]: I0202 16:51:09.928391 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:09Z","lastTransitionTime":"2026-02-02T16:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.031589 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.033144 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.033358 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.033512 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.033765 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.137612 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.137681 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.137706 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.137740 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.137763 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.172381 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 13:29:43.247736078 +0000 UTC Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.188766 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.188789 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:10 crc kubenswrapper[4835]: E0202 16:51:10.188955 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:10 crc kubenswrapper[4835]: E0202 16:51:10.189130 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.239996 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.240144 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.240167 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.240194 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.240214 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.342938 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.343010 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.343034 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.343062 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.343083 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.445462 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.445498 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.445517 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.445542 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.445557 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.549187 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.549512 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.549616 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.549702 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.549793 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.659093 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.659545 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.660148 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.660647 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.661052 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.764623 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.764665 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.764674 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.764689 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.764700 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.867017 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.867054 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.867062 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.867076 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.867085 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.969865 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.970222 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.970433 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.970644 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:10 crc kubenswrapper[4835]: I0202 16:51:10.970820 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:10Z","lastTransitionTime":"2026-02-02T16:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.073718 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.073766 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.073775 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.073792 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.073802 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.172843 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:56:35.295330981 +0000 UTC Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.176079 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.176130 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.176143 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.176163 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.176175 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.188698 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.188811 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:11 crc kubenswrapper[4835]: E0202 16:51:11.188937 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:11 crc kubenswrapper[4835]: E0202 16:51:11.189029 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.279189 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.279245 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.279261 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.279316 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.279336 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.382695 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.382750 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.382766 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.382789 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.382806 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.485691 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.485740 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.485757 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.485778 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.485797 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.588089 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.588144 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.588165 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.588192 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.588214 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.690979 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.691018 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.691029 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.691047 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.691059 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.793535 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.793605 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.793630 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.793660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.793683 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.895915 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.895949 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.895958 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.895975 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.895986 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.999388 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.999431 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.999439 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.999455 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:11 crc kubenswrapper[4835]: I0202 16:51:11.999464 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:11Z","lastTransitionTime":"2026-02-02T16:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.102582 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.102640 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.102659 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.102686 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.102705 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.173024 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 05:53:21.095442682 +0000 UTC Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.188449 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:12 crc kubenswrapper[4835]: E0202 16:51:12.188564 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.188744 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:12 crc kubenswrapper[4835]: E0202 16:51:12.188808 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.205126 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.205154 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.205169 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.205197 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.205207 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.307721 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.307780 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.307794 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.307808 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.307819 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.409907 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.409991 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.410017 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.410052 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.410076 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.512676 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.512719 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.512728 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.512742 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.512754 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.615942 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.616039 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.616056 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.616075 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.616087 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.718335 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.718374 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.718383 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.718398 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.718407 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.820722 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.820810 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.820823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.820846 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.820860 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.922761 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.922839 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.922862 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.922888 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:12 crc kubenswrapper[4835]: I0202 16:51:12.922913 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:12Z","lastTransitionTime":"2026-02-02T16:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.025797 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.025845 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.025854 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.025870 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.025880 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.128045 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.128085 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.128094 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.128109 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.128119 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.174021 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 05:34:33.906067514 +0000 UTC Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.188388 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.188486 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:13 crc kubenswrapper[4835]: E0202 16:51:13.188546 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:13 crc kubenswrapper[4835]: E0202 16:51:13.188643 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.230350 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.230400 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.230435 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.230485 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.230500 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.334199 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.334243 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.334252 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.334266 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.334292 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.436090 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.436159 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.436174 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.436192 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.436204 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.538715 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.538976 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.539087 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.539182 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.539295 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.643621 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.643652 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.643660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.643696 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.643705 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.746576 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.746633 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.746650 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.746671 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.746687 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.848933 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.848972 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.848982 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.848997 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.849006 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.951865 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.951907 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.951916 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.951929 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:13 crc kubenswrapper[4835]: I0202 16:51:13.951938 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:13Z","lastTransitionTime":"2026-02-02T16:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.054391 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.054452 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.054470 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.054493 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.054513 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.157506 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.157546 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.157555 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.157571 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.157581 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.174394 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 23:29:30.844637263 +0000 UTC Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.188077 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.188139 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:14 crc kubenswrapper[4835]: E0202 16:51:14.188201 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:14 crc kubenswrapper[4835]: E0202 16:51:14.188356 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.260150 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.260213 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.260230 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.260251 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.260267 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.363605 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.363648 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.363657 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.363672 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.363682 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.466601 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.466645 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.466653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.466668 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.466677 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.569598 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.569642 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.569656 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.569676 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.569693 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.672380 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.672420 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.672430 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.672445 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.672456 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.774731 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.774781 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.774793 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.774810 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.774821 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.877956 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.878019 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.878035 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.878060 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.878076 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.980152 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.980192 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.980203 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.980220 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:14 crc kubenswrapper[4835]: I0202 16:51:14.980233 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:14Z","lastTransitionTime":"2026-02-02T16:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.086225 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.086266 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.086292 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.086309 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.086321 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.174511 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 21:03:32.444020272 +0000 UTC Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.187853 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.187927 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.187979 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.188053 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.189039 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.189072 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.189081 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.189096 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.189107 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.292839 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.292881 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.292895 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.292913 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.292924 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.395555 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.395592 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.395602 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.395616 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.395625 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.498256 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.498351 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.498373 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.498398 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.498420 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.552505 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.552561 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.552577 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.552600 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.552619 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.575131 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:15Z is after 
2025-08-24T17:21:41Z" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.579135 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.579174 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.579185 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.579202 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.579212 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.582497 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.582671 4835 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.582742 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs podName:5f2e42e3-ff22-4273-9a65-d7e55792155e nodeName:}" failed. No retries permitted until 2026-02-02 16:51:47.582720397 +0000 UTC m=+99.204324477 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs") pod "network-metrics-daemon-fbl8t" (UID: "5f2e42e3-ff22-4273-9a65-d7e55792155e") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.594328 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:15Z is after 
2025-08-24T17:21:41Z" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.598761 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.598805 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.598817 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.598834 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.598849 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.613121 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:15Z is after 
2025-08-24T17:21:41Z" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.616752 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.616802 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.616814 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.616837 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.616850 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.628217 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:15Z is after 
2025-08-24T17:21:41Z" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.632561 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.632603 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.632615 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.632633 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.632645 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.658412 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:15Z is after 
2025-08-24T17:21:41Z" Feb 02 16:51:15 crc kubenswrapper[4835]: E0202 16:51:15.658647 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.660418 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.660458 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.660468 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.660484 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.660495 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.763244 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.763302 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.763314 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.763342 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.763351 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.865605 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.865646 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.865660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.865675 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.865686 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.968723 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.968781 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.968798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.968821 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:15 crc kubenswrapper[4835]: I0202 16:51:15.968838 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:15Z","lastTransitionTime":"2026-02-02T16:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.071613 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.071661 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.071675 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.071692 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.071703 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.174153 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.174213 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.174232 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.174255 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.174293 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.175318 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 19:51:17.826249478 +0000 UTC Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.188787 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.188813 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:16 crc kubenswrapper[4835]: E0202 16:51:16.188919 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:16 crc kubenswrapper[4835]: E0202 16:51:16.188998 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.277255 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.277330 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.277343 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.277360 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.277371 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.380033 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.380072 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.380080 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.380094 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.380103 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.482306 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.482352 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.482362 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.482377 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.482387 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.584992 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.585068 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.585089 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.585113 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.585127 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.687231 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.687290 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.687302 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.687322 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.687334 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.789423 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.789470 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.789482 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.789499 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.789515 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.891393 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.891433 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.891443 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.891458 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.891469 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.993896 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.993936 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.993944 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.993960 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:16 crc kubenswrapper[4835]: I0202 16:51:16.993981 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:16Z","lastTransitionTime":"2026-02-02T16:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.096249 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.096326 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.096342 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.096360 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.096394 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.175975 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 14:02:34.008899619 +0000 UTC Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.188455 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.188669 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:17 crc kubenswrapper[4835]: E0202 16:51:17.188804 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:17 crc kubenswrapper[4835]: E0202 16:51:17.189004 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.197946 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.197973 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.197986 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.198005 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.198018 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.300992 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.301035 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.301048 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.301066 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.301081 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.403262 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.403322 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.403331 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.403346 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.403356 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.506436 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.506478 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.506492 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.506507 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.506518 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.608931 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.608974 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.608985 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.609000 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.609010 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.711211 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.711261 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.711298 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.711315 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.711326 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.813845 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.813886 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.813898 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.813914 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.813927 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.916174 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.916239 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.916249 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.916295 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:17 crc kubenswrapper[4835]: I0202 16:51:17.916311 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:17Z","lastTransitionTime":"2026-02-02T16:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.018383 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.018418 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.018427 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.018443 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.018517 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.120894 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.120933 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.120942 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.120955 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.120966 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.176773 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 16:02:58.152028665 +0000 UTC Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.188091 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:18 crc kubenswrapper[4835]: E0202 16:51:18.188352 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.188177 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:18 crc kubenswrapper[4835]: E0202 16:51:18.188669 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.223148 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.223189 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.223202 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.223219 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.223230 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.329491 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.329535 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.329545 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.329560 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.329574 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.431919 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.431971 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.431998 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.432011 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.432020 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.534158 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.534440 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.534552 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.534653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.534736 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.608672 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/0.log" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.608717 4835 generic.go:334] "Generic (PLEG): container finished" podID="92da4528-a699-45b1-aed0-d49a382bf0a1" containerID="ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347" exitCode=1 Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.608744 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hzst6" event={"ID":"92da4528-a699-45b1-aed0-d49a382bf0a1","Type":"ContainerDied","Data":"ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.609049 4835 scope.go:117] "RemoveContainer" containerID="ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.634637 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05a
a9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.637587 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.637626 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.637639 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.637653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.637663 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.649679 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.671660 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.683901 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.693390 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.706654 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.722991 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.733488 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.739658 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.739712 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.739722 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.739739 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.739748 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.744462 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.756670 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.766961 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.777087 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.788162 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.799707 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.813970 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.823046 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.837784 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.841687 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.841720 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc 
kubenswrapper[4835]: I0202 16:51:18.841735 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.841752 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.841764 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.849561 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:18Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.944244 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.944300 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.944312 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.944328 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:18 crc kubenswrapper[4835]: I0202 16:51:18.944338 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:18Z","lastTransitionTime":"2026-02-02T16:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.047061 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.047138 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.047160 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.047184 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.047198 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.149180 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.149244 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.149255 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.149269 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.149294 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.177460 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 21:30:44.52386369 +0000 UTC Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.187897 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.187943 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:19 crc kubenswrapper[4835]: E0202 16:51:19.188013 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:19 crc kubenswrapper[4835]: E0202 16:51:19.188058 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.205019 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.221977 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.236190 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.246568 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.251129 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.251170 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.251182 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.251198 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.251209 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.261554 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.275002 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.290856 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.302544 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1e
b2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.311687 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.326378 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\
\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2
eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16
:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.336575 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.353614 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.353644 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.353654 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.353671 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.353685 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.355479 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05a
a9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.364424 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.380311 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4
f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.390101 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.399949 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.409508 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.421476 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon 
started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.456088 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.456119 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.456129 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.456142 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.456150 4835 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.558671 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.558734 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.558745 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.558769 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.558788 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.615376 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/0.log" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.615465 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hzst6" event={"ID":"92da4528-a699-45b1-aed0-d49a382bf0a1","Type":"ContainerStarted","Data":"372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.632267 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.647888 4835 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.661772 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.661817 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.661830 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.661849 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.661862 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.667516 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.
io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.681478 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.695077 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.711007 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.727701 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc
-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.739291 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.753902 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.763904 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.763949 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc 
kubenswrapper[4835]: I0202 16:51:19.763960 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.763976 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.763989 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.764985 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.777585 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.801606 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.810585 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.830040 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4
f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.844947 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.857910 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.866165 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.866224 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.866237 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.866254 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.866267 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.871485 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.885958 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:19Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.969266 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.969317 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.969328 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.969342 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:19 crc kubenswrapper[4835]: I0202 16:51:19.969354 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:19Z","lastTransitionTime":"2026-02-02T16:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.071681 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.071721 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.071731 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.071746 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.071754 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.174679 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.174730 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.174741 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.174758 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.174768 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.177857 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 08:06:54.932080139 +0000 UTC Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.188240 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.188305 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:20 crc kubenswrapper[4835]: E0202 16:51:20.188357 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:20 crc kubenswrapper[4835]: E0202 16:51:20.188433 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.277684 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.277728 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.277737 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.277753 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.277762 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.380919 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.380963 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.380974 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.380991 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.381004 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.483684 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.483730 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.483741 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.483758 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.483768 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.586691 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.586767 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.586784 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.586808 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.586828 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.689171 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.689211 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.689220 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.689236 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.689248 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.791595 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.791627 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.791635 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.791647 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.791655 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.894069 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.894112 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.894121 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.894138 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.894147 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.996032 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.996075 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.996093 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.996111 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:20 crc kubenswrapper[4835]: I0202 16:51:20.996123 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:20Z","lastTransitionTime":"2026-02-02T16:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.098677 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.098733 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.098750 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.098775 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.098791 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.178701 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 19:34:01.868276689 +0000 UTC Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.188028 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.188230 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:21 crc kubenswrapper[4835]: E0202 16:51:21.188516 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:21 crc kubenswrapper[4835]: E0202 16:51:21.189070 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.189673 4835 scope.go:117] "RemoveContainer" containerID="d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.200536 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.200760 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.200908 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.201083 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.201225 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.304021 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.304062 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.304071 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.304085 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.304098 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.406938 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.406983 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.406994 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.407009 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.407020 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.514141 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.514221 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.514311 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.514352 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.514365 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.617366 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.617410 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.617422 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.617438 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.617459 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.623443 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/2.log" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.625546 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.626232 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.644765 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724
c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.662417 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.676386 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.692922 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.709335 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.719547 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.719804 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.719832 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.719841 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.719856 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.719868 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.731839 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.742009 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.756355 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.766609 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.784996 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1
b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.797303 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.808936 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.822304 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.822348 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.822360 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.822381 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.822392 4835 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.825698 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.843483 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.866382 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: 
Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\
\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.877630 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.889535 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:21Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.923952 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.923989 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.923998 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.924011 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:21 crc kubenswrapper[4835]: I0202 16:51:21.924020 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:21Z","lastTransitionTime":"2026-02-02T16:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.026639 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.026670 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.026679 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.026692 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.026701 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.129401 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.129441 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.129449 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.129464 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.129474 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.179847 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 10:33:51.759747505 +0000 UTC Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.188325 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:22 crc kubenswrapper[4835]: E0202 16:51:22.188461 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.188675 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:22 crc kubenswrapper[4835]: E0202 16:51:22.188729 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.231735 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.231762 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.231769 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.231782 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.231790 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.335819 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.335865 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.335875 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.335891 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.335905 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.439184 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.439257 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.439327 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.439359 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.439384 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.541877 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.541917 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.541927 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.541942 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.541953 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.630286 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/3.log" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.630757 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/2.log" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.633019 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" exitCode=1 Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.633054 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.633083 4835 scope.go:117] "RemoveContainer" containerID="d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.633712 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 16:51:22 crc kubenswrapper[4835]: E0202 16:51:22.633872 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\"" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.646611 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.646644 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.646653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.646665 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.646676 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.655873 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.667345 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.677846 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.689773 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.701115 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.721334 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3680ac3f37206e99d3cf5e2ae22cfa7cc3ca05aa9e369270a91cf82c3e87070\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:50:56Z\\\",\\\"message\\\":\\\"}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 16:50:56.110261 6451 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110442 6451 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0202 16:50:56.110466 6451 ovn.go:134] Ensuring zone local for Pod openshift-kube-controller-manager/kube-controller-manager-crc in node crc\\\\nI0202 16:50:56.110370 6451 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nF0202 16:50:56.109968 6451 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:22Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 16:51:22.061526 6857 services_controller.go:453] Built service openshift-dns-operator/metrics template LB for network=default: []services.LB{}\\\\nF0202 16:51:22.061553 6857 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin 
network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"ht\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.731691 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.741525 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.748821 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.748868 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.748877 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.748893 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.748903 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.752812 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.766876 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.780105 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.795758 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.811034 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.822380 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.835524 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mount
Path\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.846475 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.851429 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.851491 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.851500 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.851515 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.851525 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.861609 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5b
d9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.874001 4835 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:22Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.954039 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.954079 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 
16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.954088 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.954101 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:22 crc kubenswrapper[4835]: I0202 16:51:22.954111 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:22Z","lastTransitionTime":"2026-02-02T16:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.056252 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.056307 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.056319 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.056333 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.056343 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.158873 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.158903 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.158912 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.158925 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.158933 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.180678 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 06:18:19.673883995 +0000 UTC Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.188173 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.188255 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:23 crc kubenswrapper[4835]: E0202 16:51:23.188338 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:23 crc kubenswrapper[4835]: E0202 16:51:23.188566 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.260930 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.260983 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.260994 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.261011 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.261022 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.363373 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.363420 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.363431 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.363450 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.363468 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.467251 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.467307 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.467319 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.467333 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.467343 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.570211 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.570303 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.570327 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.570357 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.570380 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.638846 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/3.log" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.643243 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 16:51:23 crc kubenswrapper[4835]: E0202 16:51:23.643472 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\"" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.662739 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"star
ted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.672919 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.672982 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.672998 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.673024 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.673041 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.689061 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.704361 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.718546 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.731576 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.748113 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.765752 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.775764 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.775819 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.775835 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.775860 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.775879 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.778205 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.797474 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.811052 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.830579 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:22Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 16:51:22.061526 6857 services_controller.go:453] Built service openshift-dns-operator/metrics template LB for network=default: []services.LB{}\\\\nF0202 16:51:22.061553 6857 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"ht\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:51:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.843363 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.869172 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4
f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.878088 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.878158 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.878176 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.878201 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.878219 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:23Z","lastTransitionTime":"2026-02-02T16:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.883430 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.895999 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.908054 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.923259 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:23 crc kubenswrapper[4835]: I0202 16:51:23.936939 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:23Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.016001 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.016054 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.016067 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.016083 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.016094 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.118626 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.118674 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.118684 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.118698 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.118707 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.181018 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 22:40:02.585524286 +0000 UTC Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.188661 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:24 crc kubenswrapper[4835]: E0202 16:51:24.188891 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.189325 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:24 crc kubenswrapper[4835]: E0202 16:51:24.189525 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.225948 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.226028 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.226071 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.226101 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.226124 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.330008 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.330049 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.330058 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.330071 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.330081 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.432228 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.432265 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.432302 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.432318 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.432327 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.535382 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.535445 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.535455 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.535471 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.535483 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.638025 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.638067 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.638078 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.638093 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.638104 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.739890 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.739928 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.739936 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.739949 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.739958 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.841887 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.841938 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.841948 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.841964 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.841975 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.945070 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.945133 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.945153 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.945176 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:24 crc kubenswrapper[4835]: I0202 16:51:24.945193 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:24Z","lastTransitionTime":"2026-02-02T16:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.048570 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.048633 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.048645 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.048664 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.048676 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.151201 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.151254 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.151266 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.151295 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.151305 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.181781 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 06:09:34.603623949 +0000 UTC Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.188150 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.188335 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:25 crc kubenswrapper[4835]: E0202 16:51:25.188512 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:25 crc kubenswrapper[4835]: E0202 16:51:25.188647 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.254392 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.254449 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.254466 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.254490 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.254508 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.356838 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.356923 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.356941 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.356965 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.356982 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.458988 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.459024 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.459042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.459056 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.459066 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.562377 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.562463 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.562497 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.562526 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.562547 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.664854 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.664937 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.664966 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.664999 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.665023 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.768558 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.768627 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.768651 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.768673 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.768689 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.871039 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.871091 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.871106 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.871125 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.871140 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.974828 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.974920 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.974947 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.974977 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:25 crc kubenswrapper[4835]: I0202 16:51:25.975001 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:25Z","lastTransitionTime":"2026-02-02T16:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.059579 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.059632 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.059642 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.059661 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.059672 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: E0202 16:51:26.074169 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:26Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.082523 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.082592 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.082600 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.082614 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.082659 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: E0202 16:51:26.099175 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:26Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.104705 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.104820 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.104887 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.104927 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.104980 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: E0202 16:51:26.125952 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:26Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.130638 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.130696 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.130708 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.130732 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.130757 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: E0202 16:51:26.148701 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:26Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.153132 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.153166 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.153174 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.153188 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.153198 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: E0202 16:51:26.169414 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:26Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:26 crc kubenswrapper[4835]: E0202 16:51:26.169532 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.171042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.171102 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.171114 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.171133 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.171145 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.182672 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 17:54:14.204775569 +0000 UTC Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.188038 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.188048 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:26 crc kubenswrapper[4835]: E0202 16:51:26.188323 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:26 crc kubenswrapper[4835]: E0202 16:51:26.188469 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.274734 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.274798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.274807 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.274823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.274833 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.377733 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.377793 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.377828 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.377858 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.377881 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.480631 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.480685 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.480701 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.480723 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.480739 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.583815 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.583903 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.583937 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.583966 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:26 crc kubenswrapper[4835]: I0202 16:51:26.583989 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:26Z","lastTransitionTime":"2026-02-02T16:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:27 crc kubenswrapper[4835]: I0202 16:51:27.182954 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 01:34:15.184539208 +0000 UTC Feb 02 16:51:27 crc kubenswrapper[4835]: I0202 16:51:27.188416 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:27 crc kubenswrapper[4835]: I0202 16:51:27.188446 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:27 crc kubenswrapper[4835]: E0202 16:51:27.188559 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:27 crc kubenswrapper[4835]: E0202 16:51:27.188734 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.183913 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 05:01:49.389868028 +0000 UTC Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.396769 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.396822 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.396836 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.396853 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.396865 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:28Z","lastTransitionTime":"2026-02-02T16:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:28 crc kubenswrapper[4835]: E0202 16:51:28.397136 4835 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.209s" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.397317 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.397357 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:28 crc kubenswrapper[4835]: E0202 16:51:28.397418 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:28 crc kubenswrapper[4835]: E0202 16:51:28.397489 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.498804 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.498900 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.498928 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.498961 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.498987 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:28Z","lastTransitionTime":"2026-02-02T16:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.601794 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.601887 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.601909 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.601931 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.601948 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:28Z","lastTransitionTime":"2026-02-02T16:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.704682 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.704749 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.704767 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.704790 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.704807 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:28Z","lastTransitionTime":"2026-02-02T16:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.809118 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.809340 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.809371 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.809406 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.809444 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:28Z","lastTransitionTime":"2026-02-02T16:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.913389 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.913460 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.913476 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.913492 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:28 crc kubenswrapper[4835]: I0202 16:51:28.913504 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:28Z","lastTransitionTime":"2026-02-02T16:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.016558 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.016610 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.016622 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.016637 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.016650 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.119631 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.119666 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.119673 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.119686 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.119694 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.184528 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 21:54:43.679651801 +0000 UTC Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.200702 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 
16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.214875 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.222132 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.222188 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.222207 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.222230 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.222248 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.227900 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.247874 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.262305 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.276586 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.300581 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:22Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 16:51:22.061526 6857 services_controller.go:453] Built service openshift-dns-operator/metrics template LB for network=default: []services.LB{}\\\\nF0202 16:51:22.061553 6857 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"ht\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:51:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.315070 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.324150 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.324182 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.324191 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.324204 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.324214 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.332098 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.349615 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.362507 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.373704 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.384252 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.397500 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.400779 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:29 crc kubenswrapper[4835]: E0202 16:51:29.400910 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.400791 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:29 crc kubenswrapper[4835]: E0202 16:51:29.401038 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.412970 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.427125 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.427178 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.427194 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.427215 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.427230 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.428483 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497
e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.440437 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.452717 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:29Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.530701 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.530768 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.530789 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.530820 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.530841 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.633687 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.633733 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.633745 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.633762 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.633773 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.737116 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.737162 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.737174 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.737190 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.737203 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.843547 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.843610 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.843622 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.843642 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.843656 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.946348 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.946387 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.946397 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.946472 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:29 crc kubenswrapper[4835]: I0202 16:51:29.946490 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:29Z","lastTransitionTime":"2026-02-02T16:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.048942 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.049047 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.049058 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.049074 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.049084 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.151782 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.151832 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.151841 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.151863 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.151875 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.185328 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 02:24:56.737279631 +0000 UTC Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.188568 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:30 crc kubenswrapper[4835]: E0202 16:51:30.188756 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.188616 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:30 crc kubenswrapper[4835]: E0202 16:51:30.188958 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.254793 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.254837 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.254847 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.255044 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.255056 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.357739 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.357787 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.357797 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.357818 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.357834 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.460387 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.460463 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.460487 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.460517 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.460543 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.563144 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.563210 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.563228 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.563252 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.563306 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.665898 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.665924 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.665931 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.665944 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.665953 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.768852 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.768930 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.768954 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.768985 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.769007 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.871641 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.871700 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.871723 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.871754 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.871775 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.975366 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.975422 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.975443 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.975471 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:30 crc kubenswrapper[4835]: I0202 16:51:30.975489 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:30Z","lastTransitionTime":"2026-02-02T16:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.078252 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.078359 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.078384 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.078415 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.078440 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.181261 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.181305 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.181315 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.181328 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.181339 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.185790 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 22:31:18.258583519 +0000 UTC Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.188216 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.188243 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:31 crc kubenswrapper[4835]: E0202 16:51:31.188476 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:31 crc kubenswrapper[4835]: E0202 16:51:31.188642 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.284959 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.285026 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.285042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.285064 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.285080 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.387401 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.387449 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.387460 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.387475 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.387486 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.490077 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.490122 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.490136 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.490155 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.490179 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.592863 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.592916 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.592931 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.592952 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.592996 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.696050 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.696102 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.696117 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.696140 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.696170 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.798425 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.798490 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.798506 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.798525 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.798539 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.901709 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.902040 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.902063 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.902090 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:31 crc kubenswrapper[4835]: I0202 16:51:31.902112 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:31Z","lastTransitionTime":"2026-02-02T16:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.005537 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.005590 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.005607 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.005631 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.005647 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.108413 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.108441 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.108449 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.108461 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.108470 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.186342 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 22:56:00.05284522 +0000 UTC Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.188574 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:32 crc kubenswrapper[4835]: E0202 16:51:32.188870 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.188942 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:32 crc kubenswrapper[4835]: E0202 16:51:32.189234 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.202472 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.210455 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.210518 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.210536 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.210554 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.210566 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.319386 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.319460 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.319744 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.319778 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.319804 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.422559 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.422631 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.422653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.422677 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.422694 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.526069 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.526106 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.526116 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.526131 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.526141 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.630045 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.630109 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.630135 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.630162 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.630185 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.732963 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.733202 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.733218 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.733243 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.733254 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.836901 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.836947 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.836965 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.836990 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.837009 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.939761 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.939876 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.939890 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.939907 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:32 crc kubenswrapper[4835]: I0202 16:51:32.939917 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:32Z","lastTransitionTime":"2026-02-02T16:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.042999 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.043059 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.043074 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.043098 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.043113 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.066003 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.066105 4835 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.066168 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.066152223 +0000 UTC m=+148.687756303 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.146605 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.146674 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.146694 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.146719 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.146737 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.167287 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.167396 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.167467 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.167518 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167603 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-02 16:52:37.167567763 +0000 UTC m=+148.789171883 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167631 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167647 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167658 4835 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167707 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.167689097 +0000 UTC m=+148.789293277 (durationBeforeRetry 1m4s). 
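Context note (not part of the captured log): the "object ... not registered" failures above indicate that the kubelet's local ConfigMap/Secret cache is not yet serving those objects after the restart, so each projected-volume mount is refused and rescheduled with the same 1m4s backoff. A short illustrative client-go sketch for confirming that the referenced ConfigMaps do exist on the API server; the kubeconfig path is a placeholder.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Placeholder path; point this at the cluster's admin kubeconfig.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// The objects the kubelet reports as "not registered" above.
	for _, name := range []string{"kube-root-ca.crt", "openshift-service-ca.crt"} {
		_, err := client.CoreV1().ConfigMaps("openshift-network-diagnostics").
			Get(context.TODO(), name, metav1.GetOptions{})
		fmt.Printf("configmap openshift-network-diagnostics/%s: err=%v\n", name, err)
	}
}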
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167728 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167776 4835 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167790 4835 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167792 4835 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167860 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.167845341 +0000 UTC m=+148.789449461 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.167883 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.167875702 +0000 UTC m=+148.789479772 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.187372 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 14:17:58.757787657 +0000 UTC Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.188796 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.188806 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.189084 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:33 crc kubenswrapper[4835]: E0202 16:51:33.188932 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.249732 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.249806 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.249838 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.249861 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.249873 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.352164 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.352210 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.352224 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.352243 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.352254 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.455134 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.455173 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.455182 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.455196 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.455206 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.556884 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.556912 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.556920 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.557093 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.557105 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.660078 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.660173 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.660193 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.660220 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.660240 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.763388 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.763429 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.763437 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.763453 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.763462 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.866059 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.866091 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.866102 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.866117 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.866128 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.969378 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.969447 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.969476 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.969520 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:33 crc kubenswrapper[4835]: I0202 16:51:33.969544 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:33Z","lastTransitionTime":"2026-02-02T16:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.072320 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.072432 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.072453 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.072476 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.072493 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.175919 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.175963 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.175974 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.175993 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.176006 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.187843 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 07:28:44.58172534 +0000 UTC Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.187945 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.188551 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:34 crc kubenswrapper[4835]: E0202 16:51:34.188741 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:34 crc kubenswrapper[4835]: E0202 16:51:34.188884 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.279708 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.279799 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.279829 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.279859 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.279883 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.382481 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.382611 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.382635 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.382660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.382675 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.485934 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.486043 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.486068 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.486097 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.486123 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.589370 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.589532 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.589553 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.589577 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.589596 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.693553 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.693623 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.693644 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.693670 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.693691 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.797269 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.797420 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.797440 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.797466 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.797483 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.900906 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.900991 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.901016 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.901044 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:34 crc kubenswrapper[4835]: I0202 16:51:34.901062 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:34Z","lastTransitionTime":"2026-02-02T16:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.004070 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.004150 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.004175 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.004204 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.004227 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.107600 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.107655 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.107675 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.107699 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.107718 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.188430 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 22:19:28.71653803 +0000 UTC Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.189448 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.189653 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:35 crc kubenswrapper[4835]: E0202 16:51:35.189737 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:35 crc kubenswrapper[4835]: E0202 16:51:35.189852 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.210829 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.210888 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.210906 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.210929 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.210947 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.314320 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.314392 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.314415 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.314440 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.314522 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.417791 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.417855 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.417880 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.417909 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.417930 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.520609 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.520672 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.520692 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.520720 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.520741 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.623473 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.623559 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.623587 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.623629 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.623677 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.726858 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.726926 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.726949 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.726979 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.726999 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.830392 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.830455 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.830483 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.830541 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.830566 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.933421 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.933466 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.933481 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.933501 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:35 crc kubenswrapper[4835]: I0202 16:51:35.933515 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:35Z","lastTransitionTime":"2026-02-02T16:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.036350 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.036403 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.036420 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.036445 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.036461 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.139188 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.139222 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.139230 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.139244 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.139252 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.187943 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.188093 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:36 crc kubenswrapper[4835]: E0202 16:51:36.188249 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:36 crc kubenswrapper[4835]: E0202 16:51:36.188397 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.188758 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 12:05:20.733056405 +0000 UTC Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.242191 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.242251 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.242293 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.242319 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.242336 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.344820 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.344858 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.344869 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.344884 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.344895 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.447321 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.447355 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.447364 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.447379 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.447390 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.534256 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.534360 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.534379 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.534409 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.534427 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: E0202 16:51:36.552687 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.557525 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.557595 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.557613 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.557640 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.557661 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: E0202 16:51:36.578080 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.582204 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.582235 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.582246 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.582262 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.582314 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: E0202 16:51:36.597804 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.601385 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.601420 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.601429 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.601442 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.601451 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: E0202 16:51:36.616218 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.620788 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.620872 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.620891 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.621486 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.621546 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: E0202 16:51:36.638207 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:36Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:36 crc kubenswrapper[4835]: E0202 16:51:36.638400 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.640160 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.640204 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.640215 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.640232 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.640244 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.742399 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.742447 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.742459 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.742476 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.742488 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.845101 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.845142 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.845153 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.845167 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.845176 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.947717 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.947789 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.947810 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.947828 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:36 crc kubenswrapper[4835]: I0202 16:51:36.947841 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:36Z","lastTransitionTime":"2026-02-02T16:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.051908 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.051999 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.052012 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.052033 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.052047 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.154647 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.154718 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.154758 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.154798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.154826 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.188225 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.188265 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:37 crc kubenswrapper[4835]: E0202 16:51:37.188402 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:37 crc kubenswrapper[4835]: E0202 16:51:37.188492 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.189364 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 15:27:45.327408177 +0000 UTC Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.258587 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.258687 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.258708 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.258732 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.258800 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.361653 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.361828 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.361852 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.361880 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.361898 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.465416 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.465538 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.465575 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.465676 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.465759 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.568960 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.569025 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.569042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.569065 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.569081 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.672385 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.672466 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.672490 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.672520 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.672544 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.775798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.775868 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.775895 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.775925 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.775949 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.879227 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.879421 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.879444 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.879474 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.879493 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.983202 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.983263 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.983307 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.983331 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:37 crc kubenswrapper[4835]: I0202 16:51:37.983348 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:37Z","lastTransitionTime":"2026-02-02T16:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.086159 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.086223 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.086241 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.086311 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.086333 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.187878 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:38 crc kubenswrapper[4835]: E0202 16:51:38.188041 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.188353 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:38 crc kubenswrapper[4835]: E0202 16:51:38.188449 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.189413 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.189461 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.189478 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.189498 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.189515 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.190059 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 07:26:51.248651398 +0000 UTC Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.292212 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.292247 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.292255 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.292295 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.292305 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.395713 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.395774 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.395790 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.395816 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.395834 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.498394 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.498488 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.498501 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.498516 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.498534 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.601760 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.601831 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.601855 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.601896 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.601921 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.705782 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.705843 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.705861 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.705885 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.705904 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.809080 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.809152 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.809172 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.809197 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.809216 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.912687 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.912754 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.912773 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.912823 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:38 crc kubenswrapper[4835]: I0202 16:51:38.912837 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:38Z","lastTransitionTime":"2026-02-02T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.016412 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.016472 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.016482 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.016500 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.016564 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.118650 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.118694 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.118702 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.118715 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.118723 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.188580 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.188926 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:39 crc kubenswrapper[4835]: E0202 16:51:39.188892 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:39 crc kubenswrapper[4835]: E0202 16:51:39.189577 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.190577 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 17:06:12.376791978 +0000 UTC Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.190787 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 16:51:39 crc kubenswrapper[4835]: E0202 16:51:39.191033 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\"" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.209382 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2a68d35-1c36-49ff-9cfb-ea9252156357\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f070537f6e3b1cf3063217468831340e228c08e517afcce52effc5dddad84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://692dc63673981310457321880880b8f84b7e7935e0916e98f497e1e26efd2df5\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://692dc63673981310457321880880b8f84b7e7935e0916e98f497e1e26efd2df5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.221507 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.221560 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.221580 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.221605 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.221624 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.227570 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.249110 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.266417 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.281907 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.312574 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.324354 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.324419 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.324433 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.324453 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.324467 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.328495 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.345439 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:
42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.363643 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92ed
af5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.379810 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.402479 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.419841 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.427425 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.427477 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.427494 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.427517 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.427533 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.436810 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.471926 4835 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2
af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.492980 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.511341 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.531012 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.531073 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.531089 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.531112 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.531132 4835 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.531834 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.553045 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.585494 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:22Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 16:51:22.061526 6857 services_controller.go:453] Built service openshift-dns-operator/metrics template LB for network=default: []services.LB{}\\\\nF0202 16:51:22.061553 6857 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"ht\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:51:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.634579 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.634677 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.634694 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.634718 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.634736 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.737705 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.737755 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.737771 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.737794 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.737813 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.841853 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.841927 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.841948 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.842374 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.842450 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.945925 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.945983 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.946000 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.946024 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:39 crc kubenswrapper[4835]: I0202 16:51:39.946042 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:39Z","lastTransitionTime":"2026-02-02T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.049935 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.049996 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.050015 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.050037 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.050054 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.153065 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.153117 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.153133 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.153154 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.153170 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.187809 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.187905 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:40 crc kubenswrapper[4835]: E0202 16:51:40.188106 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:40 crc kubenswrapper[4835]: E0202 16:51:40.188339 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.191224 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 19:23:27.902975647 +0000 UTC Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.256595 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.256668 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.256691 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.256718 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.256740 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.360040 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.360167 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.360189 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.360213 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.360230 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.463822 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.463891 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.463909 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.463939 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.463954 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.567549 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.567651 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.567672 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.567700 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.567722 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.671685 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.671732 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.671748 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.671772 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.671789 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.774474 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.774535 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.774552 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.774575 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.774592 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.877053 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.877103 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.877123 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.877155 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.877177 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.979720 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.979748 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.979755 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.979768 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:40 crc kubenswrapper[4835]: I0202 16:51:40.979776 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:40Z","lastTransitionTime":"2026-02-02T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.082900 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.082950 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.082966 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.082987 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.083004 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.186116 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.186172 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.186189 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.186212 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.186232 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.188480 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.188549 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:41 crc kubenswrapper[4835]: E0202 16:51:41.188645 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:41 crc kubenswrapper[4835]: E0202 16:51:41.188728 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.191419 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 16:06:44.278777107 +0000 UTC Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.289478 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.289554 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.289580 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.289610 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.289633 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.392961 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.393012 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.393026 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.393052 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.393071 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.496478 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.496563 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.496579 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.496603 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.496619 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.599404 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.599480 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.599502 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.599570 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.599591 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.703181 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.703256 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.703289 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.703313 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.703330 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.806100 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.806153 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.806164 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.806183 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.806195 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.909754 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.909826 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.909837 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.909853 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:41 crc kubenswrapper[4835]: I0202 16:51:41.909863 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:41Z","lastTransitionTime":"2026-02-02T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.013241 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.013343 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.013367 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.013398 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.013420 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.116330 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.116387 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.116410 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.116444 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.116471 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.188701 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.188744 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:42 crc kubenswrapper[4835]: E0202 16:51:42.188885 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:42 crc kubenswrapper[4835]: E0202 16:51:42.188979 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.191809 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 17:01:08.125838862 +0000 UTC Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.218815 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.218874 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.218892 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.218915 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.218937 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.321833 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.321944 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.321966 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.321994 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.322019 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.424430 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.424482 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.424494 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.424511 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.424525 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.531255 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.531980 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.532033 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.532055 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.532067 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.636091 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.636159 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.636179 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.636205 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.636225 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.739421 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.739505 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.739527 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.739558 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.739581 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.842684 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.842760 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.842785 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.842808 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.842829 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.945617 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.945708 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.945761 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.945788 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:42 crc kubenswrapper[4835]: I0202 16:51:42.945839 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:42Z","lastTransitionTime":"2026-02-02T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.049143 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.049192 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.049206 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.049223 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.049236 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.152399 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.152814 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.152988 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.153131 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.153255 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.188515 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:43 crc kubenswrapper[4835]: E0202 16:51:43.188917 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.189071 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:43 crc kubenswrapper[4835]: E0202 16:51:43.189487 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.191933 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 09:45:38.762358853 +0000 UTC Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.256940 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.256991 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.257003 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.257020 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.257034 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.360030 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.360076 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.360120 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.360140 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.360152 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.463247 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.463375 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.463428 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.463457 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.463475 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.567075 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.567113 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.567122 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.567136 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.567145 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.670391 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.670481 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.670509 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.670574 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.670601 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.774299 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.774683 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.774700 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.774723 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.774740 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.877562 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.877626 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.877642 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.877663 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.877682 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.981076 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.981143 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.981160 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.981185 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:43 crc kubenswrapper[4835]: I0202 16:51:43.981203 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:43Z","lastTransitionTime":"2026-02-02T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.084640 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.084716 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.084738 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.084767 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.084790 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.187839 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.187955 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.188001 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.188019 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.188049 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: E0202 16:51:44.188044 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.188067 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.187871 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:44 crc kubenswrapper[4835]: E0202 16:51:44.188227 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.192850 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 04:44:21.376802034 +0000 UTC Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.291576 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.291647 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.291675 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.291705 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.291723 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.394327 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.394787 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.394956 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.395086 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.395214 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.498904 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.498941 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.498953 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.498983 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.498996 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.601542 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.601602 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.601614 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.601633 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.601648 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.704937 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.704978 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.704988 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.705003 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.705016 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.807010 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.807042 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.807059 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.807075 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.807086 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.908952 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.908982 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.908993 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.909008 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:44 crc kubenswrapper[4835]: I0202 16:51:44.909020 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:44Z","lastTransitionTime":"2026-02-02T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.011371 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.011402 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.011410 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.011423 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.011432 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.113898 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.113958 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.113978 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.114002 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.114019 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.188750 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.188770 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:45 crc kubenswrapper[4835]: E0202 16:51:45.188952 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:45 crc kubenswrapper[4835]: E0202 16:51:45.189034 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.192954 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 01:53:36.240340658 +0000 UTC Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.216004 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.216095 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.216135 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.216168 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.216192 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.320367 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.320438 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.320464 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.320494 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.320515 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.423362 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.423442 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.423460 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.423484 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.423500 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.525990 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.526048 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.526065 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.526089 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.526109 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.628195 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.628241 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.628252 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.628294 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.628308 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.731113 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.731175 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.731185 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.731203 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.731214 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.833807 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.833886 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.833909 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.833934 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.833950 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.937541 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.937601 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.937622 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.937652 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:45 crc kubenswrapper[4835]: I0202 16:51:45.937675 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:45Z","lastTransitionTime":"2026-02-02T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.040340 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.040374 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.040383 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.040399 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.040411 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.143821 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.143884 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.143905 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.143936 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.143960 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.188455 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:46 crc kubenswrapper[4835]: E0202 16:51:46.188642 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.188470 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:46 crc kubenswrapper[4835]: E0202 16:51:46.188736 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.193838 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 15:39:09.284852336 +0000 UTC Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.246304 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.246333 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.246341 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.246355 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.246383 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.348718 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.348784 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.348793 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.348812 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.348823 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.451917 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.451987 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.452010 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.452070 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.452096 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.554522 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.554606 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.554632 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.554660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.554681 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.657330 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.657407 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.657429 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.657453 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.657471 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.760468 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.760510 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.760518 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.760534 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.760544 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.824424 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.824464 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.824473 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.824487 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.824496 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: E0202 16:51:46.840903 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:46Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.845139 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.845201 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.845218 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.845243 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.845263 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: E0202 16:51:46.861237 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:46Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.866016 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.866090 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.866114 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.866143 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.866165 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: E0202 16:51:46.883145 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:46Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.887078 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.887172 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.887195 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.887255 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.887299 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: E0202 16:51:46.906243 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:46Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.910954 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.911010 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.911030 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.911053 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.911071 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:46 crc kubenswrapper[4835]: E0202 16:51:46.928091 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"827d890b-2331-413c-aedb-9de5a54d9bc1\\\",\\\"systemUUID\\\":\\\"a9bea5c4-1a52-47fb-a314-7115c7964a56\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:46Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:46 crc kubenswrapper[4835]: E0202 16:51:46.928345 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.931834 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.931894 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.931922 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.931971 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:46 crc kubenswrapper[4835]: I0202 16:51:46.931998 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:46Z","lastTransitionTime":"2026-02-02T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.035976 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.036057 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.036079 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.036105 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.036122 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.139413 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.139479 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.139501 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.139526 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.139544 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.188329 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.188667 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:47 crc kubenswrapper[4835]: E0202 16:51:47.188848 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:47 crc kubenswrapper[4835]: E0202 16:51:47.188981 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.194239 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 04:13:55.075690859 +0000 UTC Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.242929 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.242998 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.243016 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.243039 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.243055 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.345709 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.345768 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.345781 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.345801 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.345814 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.449470 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.449541 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.449555 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.449594 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.449607 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.553214 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.553249 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.553258 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.553285 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.553294 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.647095 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:47 crc kubenswrapper[4835]: E0202 16:51:47.647339 4835 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:51:47 crc kubenswrapper[4835]: E0202 16:51:47.647438 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs podName:5f2e42e3-ff22-4273-9a65-d7e55792155e nodeName:}" failed. No retries permitted until 2026-02-02 16:52:51.647410271 +0000 UTC m=+163.269014451 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs") pod "network-metrics-daemon-fbl8t" (UID: "5f2e42e3-ff22-4273-9a65-d7e55792155e") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.655555 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.655825 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.655978 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.656127 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.656254 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.758226 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.758314 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.758326 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.758339 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.758347 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.861303 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.861656 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.861809 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.861974 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.862144 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.964708 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.964774 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.964787 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.964831 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:47 crc kubenswrapper[4835]: I0202 16:51:47.964845 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:47Z","lastTransitionTime":"2026-02-02T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.068082 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.068175 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.068235 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.068258 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.068317 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.170652 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.170692 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.170703 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.170719 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.170732 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.188605 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:48 crc kubenswrapper[4835]: E0202 16:51:48.188839 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.188853 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:48 crc kubenswrapper[4835]: E0202 16:51:48.189026 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.194911 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 00:42:28.26265945 +0000 UTC Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.273590 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.273664 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.273690 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.273719 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.273741 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.376270 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.376435 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.376508 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.376532 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.376548 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.479133 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.479199 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.479221 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.479252 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.479306 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.582550 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.582594 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.582608 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.582625 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.582641 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.686385 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.686511 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.686529 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.686552 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.686569 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.788968 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.789266 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.789434 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.789578 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.789692 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.893183 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.893214 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.893225 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.893239 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.893250 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.995185 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.995213 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.995220 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.995232 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:48 crc kubenswrapper[4835]: I0202 16:51:48.995240 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:48Z","lastTransitionTime":"2026-02-02T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.097420 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.097462 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.097475 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.097495 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.097511 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.188329 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:49 crc kubenswrapper[4835]: E0202 16:51:49.188536 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.188617 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:49 crc kubenswrapper[4835]: E0202 16:51:49.190129 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.195406 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 13:10:33.310090716 +0000 UTC Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.199573 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.199629 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.199647 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.199672 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.199691 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.230702 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f70d576-47d7-4712-a0b5-6a613ac42156\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c484ed3eb7962c8d6ad345dc64791d2c891787fcf980c1c836fce0602f72bc2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4\\\",\\\"ima
ge\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://415b3425d520324ddfc271207790071494930327e9acf4013377bb7e091ba4c5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://244c51b3d5ff25f2bcc7fbe3a1eb2ea96f5fe0b815efbfb820c06dc5a591a141\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.251469 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jq8mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56088f67-2758-4fe2-b21a-fbf6f780c704\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://26a2c2082c0c8cf88c8e82530440b2dd43b804d6525e08ce2049e5bf39477270\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w72x6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jq8mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.275762 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6345da6a-11cd-4a06-8586-5ea1c5a70bca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ee79d8f9114ff1d887d27307c612215f1ba790598c587e46d3d8d39b202e2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df9bb6f43faaf70409eed267b863ab4df56eb44f3810c8b2e9ae22c21baa4500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e6b6f5bd9040f8bc92c1ad80dd29b8004782824b6f0d488ddda159f25845c08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8e27617bc6cc8592db7471ea612d83bf0be4eff25cfe4952cc1c8b795cd3a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdb46ff71ce1761d9c4a7a7fbccbce2291279bed7f11174ad81508da50fb9299\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2df9ae5c8d4727e4157ab22d2a25d4ede092a2dc3994336651e6b79a2fced91d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://944df061479d3613417b19f750a76cf346de0f59386d436a73ef35c9a3a5f017\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpfxb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q5dl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.288851 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f2e42e3-ff22-4273-9a65-d7e55792155e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7th6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fbl8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.302937 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.303097 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.303112 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.303128 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.303163 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.317260 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cbaf0a8-c75d-4059-9874-d0a193090578\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb4729894
5b592ff43eff14b9a1d43289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:22Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 16:51:22.061526 6857 services_controller.go:453] Built service openshift-dns-operator/metrics template LB for network=default: []services.LB{}\\\\nF0202 16:51:22.061553 6857 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"ht\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:51:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtvrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-88n4w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.331426 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2qphx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30d99635-cebd-43b3-83cf-954ee6c4f2de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fffb10b0d4840f5548c4bab7f540a86fa22f84a7d67e3e8ea895daa3c72f489d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t6w5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:32Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2qphx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.363743 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d4117a4-477d-4d84-9b70-538f489869e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128449a918a4614d5bc2ec7250a96fa6163b9416fe79402065e3c4391fa60899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cccc7456990d7cfde9a0ab37696cc32775b51863068948a4eb9434b6987967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://561b7141ef98174991c2354dcb125948ebab58b82c369daa05a202a119b25d93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8bababe4c430f5f519615f402cb8c8bb2a292a1b9d1eb8d7fb80bb28a0ce1a7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f0c238867327311c762ec626312a401e19818ad200a68d10200be1e7c045019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5767c7c3d16fb368e05efa9f5bcf030d0fc84780bd76b2ab061499c4c15f3e92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4
f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96e01ea03af7b992411ab0e01fc76a4f47f9f0179693c4a212fa919ee0d2115c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bdab0c9b2af4b69919acba8ab278ea36dc0caf3df1ccc7b991dde4178cda11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.382491 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://256a70abc9d5dc101f9847e387905eac711e73b9ac1d841d3953168417f02143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.403435 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81337cec465155fe351e3c75f317453dc68700e2b4e1a567e48ece64840619f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.405745 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.405833 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.405863 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.405893 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.405916 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.425550 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.441919 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hzst6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92da4528-a699-45b1-aed0-d49a382bf0a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T16:51:17Z\\\",\\\"message\\\":\\\"2026-02-02T16:50:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d\\\\n2026-02-02T16:50:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_802eafd9-dacf-4471-9a37-bc870c7a3a8d to /host/opt/cni/bin/\\\\n2026-02-02T16:50:32Z [verbose] multus-daemon started\\\\n2026-02-02T16:50:32Z [verbose] Readiness Indicator file check\\\\n2026-02-02T16:51:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z5bw5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hzst6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.457700 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2a68d35-1c36-49ff-9cfb-ea9252156357\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f070537f6e3b1cf3063217468831340e228c08e517afcce52effc5dddad84bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://692dc63673981310457321880880b8f84b7e7935e0916e98f497e1e26efd2df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://692dc63673981310457321880880b8f84b7e7935e0916e98f497e1e26efd2df5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.474245 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.490609 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"88fd225f-00aa-432d-a669-8415fa06fa9e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24cb4146db0e1db8b8a7e493dd532a449dda90b01ecb31e2757aef4978acf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f19a6d195d17d6a94e08c5898740bdf1126bebb89bc9f9296c03f327e823097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-btklq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-67xl5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 
16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.508563 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.508628 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.508646 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.508671 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.508688 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.511187 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T16:50:22Z\\\",\\\"message\\\":\\\"W0202 16:50:12.339912 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 16:50:12.340595 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770051012 cert, and key in /tmp/serving-cert-3944327345/serving-signer.crt, /tmp/serving-cert-3944327345/serving-signer.key\\\\nI0202 16:50:12.540613 1 observer_polling.go:159] Starting file observer\\\\nW0202 16:50:12.543807 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 16:50:12.544028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 16:50:12.545898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3944327345/tls.crt::/tmp/serving-cert-3944327345/tls.key\\\\\\\"\\\\nF0202 16:50:22.916781 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.529068 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"359bbaaa-5447-4b3a-ac70-c794ebb86542\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:51:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43653581ec05560f58ac4c3d03ff9f5f9e105627add9e4f56026b14662543960\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02837b8c6c3c8ccd492b47276b8b7fc513a584d29d7c3d686f7104458663c9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac865c5450ef4d9e733ccbd390c1ef55d6049278ff6174dad3bd16bdfa57e037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7873fdfadfba02aa51f67d153cec8f9f99d867e82cb3b96f642606ae51fd472\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T16:50:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T16:50:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.552234 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.573207 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7d462f706200ea067e594ecce4015c98a76e286a1cf8d188d38c1fac5466396\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e24d4955fc80f7cfed8773d72dff96048fa5182bf4e260f40cfb620650b63ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.588636 4835 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878a5fb-e7f6-4458-8bcc-119bf67ad45a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T16:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98d449bcc29fb85d374fcd11a48f18a48a4fcc7b86b18b7ff30e46d128d8b1a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T16:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvmfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T16:50:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-94jlf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T16:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.612022 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.612100 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.612117 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.612144 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.612163 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.714948 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.715012 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.715030 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.715054 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.715073 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.817895 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.817940 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.817955 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.817977 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.817990 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.921490 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.921553 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.921570 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.921593 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:49 crc kubenswrapper[4835]: I0202 16:51:49.921610 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:49Z","lastTransitionTime":"2026-02-02T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.025358 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.025400 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.025412 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.025430 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.025442 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.127859 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.127899 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.127909 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.127923 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.127934 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.188663 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:50 crc kubenswrapper[4835]: E0202 16:51:50.188760 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.188669 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:50 crc kubenswrapper[4835]: E0202 16:51:50.189070 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.195673 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 15:42:09.210237118 +0000 UTC Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.230403 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.230454 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.230470 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.230492 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.230508 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.332617 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.332670 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.332687 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.332709 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.332724 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.436218 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.436397 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.436421 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.436445 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.436462 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.540245 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.540356 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.540381 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.540407 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.540424 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.642735 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.642803 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.642825 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.642855 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.642875 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.745567 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.745635 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.745654 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.745680 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.745698 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.848182 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.848300 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.848314 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.848331 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.848342 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.952149 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.952201 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.952210 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.952226 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:50 crc kubenswrapper[4835]: I0202 16:51:50.952238 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:50Z","lastTransitionTime":"2026-02-02T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.054922 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.054975 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.054990 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.055007 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.055023 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.158717 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.158798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.158828 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.158862 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.158889 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.188446 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.188497 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:51 crc kubenswrapper[4835]: E0202 16:51:51.188640 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:51 crc kubenswrapper[4835]: E0202 16:51:51.188766 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.196353 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 22:14:58.350132435 +0000 UTC Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.261867 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.261927 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.261943 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.261967 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.261988 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.365169 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.365219 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.365230 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.365248 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.365260 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.468094 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.468132 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.468141 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.468157 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.468167 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.570817 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.570906 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.570928 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.570950 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.570965 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.673985 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.674019 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.674028 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.674041 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.674050 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.777235 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.777320 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.777343 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.777372 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.777397 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.880217 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.880303 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.880323 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.880346 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.880363 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.983246 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.983365 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.983388 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.983415 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:51 crc kubenswrapper[4835]: I0202 16:51:51.983434 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:51Z","lastTransitionTime":"2026-02-02T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.086153 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.086233 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.086249 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.086293 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.086313 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.188007 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.188085 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:52 crc kubenswrapper[4835]: E0202 16:51:52.188246 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:52 crc kubenswrapper[4835]: E0202 16:51:52.188425 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.189220 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.189351 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.189380 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.189415 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.189438 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.196792 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 20:23:16.433730393 +0000 UTC Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.291985 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.292043 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.292056 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.292071 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.292083 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.395186 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.395230 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.395239 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.395255 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.395264 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.497714 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.497753 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.497762 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.497777 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.497786 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.600636 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.600692 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.600707 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.600724 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.600736 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.705921 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.705964 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.705973 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.705989 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.705998 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.808699 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.808763 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.808781 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.808806 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.808823 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.912087 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.912146 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.912168 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.912195 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:52 crc kubenswrapper[4835]: I0202 16:51:52.912214 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:52Z","lastTransitionTime":"2026-02-02T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.014549 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.014655 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.014727 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.014782 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.014806 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.119798 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.119844 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.119857 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.119875 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.119889 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.188819 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.188819 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:53 crc kubenswrapper[4835]: E0202 16:51:53.189631 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:53 crc kubenswrapper[4835]: E0202 16:51:53.189784 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.190341 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 16:51:53 crc kubenswrapper[4835]: E0202 16:51:53.190621 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-88n4w_openshift-ovn-kubernetes(0cbaf0a8-c75d-4059-9874-d0a193090578)\"" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.197146 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 14:21:13.646915447 +0000 UTC Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.222608 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.222667 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.222687 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.222707 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.222721 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.326587 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.326644 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.326660 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.326680 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.326693 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.428813 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.428864 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.428878 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.428897 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.428911 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.531508 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.531586 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.531611 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.531641 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.531662 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.634207 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.634264 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.634335 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.634364 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.634384 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.735554 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.735582 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.735598 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.735613 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.735625 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.838811 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.838858 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.838882 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.838913 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.838936 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.942066 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.942141 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.942166 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.942194 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:53 crc kubenswrapper[4835]: I0202 16:51:53.942213 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:53Z","lastTransitionTime":"2026-02-02T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.044171 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.044247 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.044304 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.044334 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.044356 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.147494 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.147546 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.147570 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.147601 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.147624 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.188109 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:54 crc kubenswrapper[4835]: E0202 16:51:54.188499 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.188113 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:54 crc kubenswrapper[4835]: E0202 16:51:54.188923 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.197922 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 19:30:44.234510892 +0000 UTC Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.251212 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.251263 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.251367 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.251408 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.251431 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.354387 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.354468 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.354507 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.354539 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.354560 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.457795 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.457851 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.457869 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.457894 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.457911 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.561606 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.561673 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.561700 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.561737 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.561760 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.664997 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.665083 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.665100 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.665123 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.665144 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.767056 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.767116 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.767133 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.767155 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.767173 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.870148 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.870201 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.870219 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.870241 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.870259 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.973898 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.973951 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.973960 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.973980 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:54 crc kubenswrapper[4835]: I0202 16:51:54.973991 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:54Z","lastTransitionTime":"2026-02-02T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.077165 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.077301 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.077328 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.077359 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.077387 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.180259 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.180349 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.180368 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.180388 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.180399 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.188682 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.188731 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:55 crc kubenswrapper[4835]: E0202 16:51:55.188859 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:55 crc kubenswrapper[4835]: E0202 16:51:55.189069 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.198064 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 01:19:57.71905011 +0000 UTC Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.283385 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.283421 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.283432 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.283449 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.283461 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.386590 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.386633 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.386642 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.386657 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.386685 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.489003 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.489077 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.489100 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.489128 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.489151 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.592655 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.592709 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.592725 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.592750 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.592769 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.696124 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.696173 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.696190 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.696216 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.696248 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.799797 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.799883 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.799911 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.799940 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.799961 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.902420 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.902501 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.902526 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.902556 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:55 crc kubenswrapper[4835]: I0202 16:51:55.902579 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:55Z","lastTransitionTime":"2026-02-02T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.005061 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.005149 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.005190 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.005225 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.005253 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.108948 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.109019 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.109055 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.109094 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.109117 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.188582 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.188619 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:56 crc kubenswrapper[4835]: E0202 16:51:56.189388 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:56 crc kubenswrapper[4835]: E0202 16:51:56.189615 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.198869 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 10:58:52.790825578 +0000 UTC Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.212237 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.212538 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.212847 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.213101 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.213367 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.316043 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.316469 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.316696 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.316856 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.317005 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.420156 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.420232 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.420250 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.420303 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.420327 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.523316 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.523818 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.524029 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.524229 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.524573 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.627688 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.627808 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.627835 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.627866 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.627887 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.730366 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.730431 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.730448 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.730471 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.730486 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.832903 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.832963 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.832978 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.832998 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.833010 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.936756 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.936824 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.936843 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.936870 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:56 crc kubenswrapper[4835]: I0202 16:51:56.936893 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:56Z","lastTransitionTime":"2026-02-02T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.039743 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.039879 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.039897 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.039932 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.039949 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:57Z","lastTransitionTime":"2026-02-02T16:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.079244 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.079336 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.079355 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.079382 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.079399 4835 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T16:51:57Z","lastTransitionTime":"2026-02-02T16:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.154763 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9"] Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.155400 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.157612 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.158215 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.159750 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.162148 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.188762 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.188945 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:57 crc kubenswrapper[4835]: E0202 16:51:57.189087 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.189016 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=89.188985065 podStartE2EDuration="1m29.188985065s" podCreationTimestamp="2026-02-02 16:50:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.188561983 +0000 UTC m=+108.810166073" watchObservedRunningTime="2026-02-02 16:51:57.188985065 +0000 UTC m=+108.810589185" Feb 02 16:51:57 crc kubenswrapper[4835]: E0202 16:51:57.189472 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.199429 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 11:09:37.894248898 +0000 UTC Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.200570 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.207458 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-jq8mv" podStartSLOduration=88.207427967 podStartE2EDuration="1m28.207427967s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.207087367 +0000 UTC m=+108.828691477" watchObservedRunningTime="2026-02-02 16:51:57.207427967 +0000 UTC m=+108.829032087" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.210692 4835 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.233018 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-q5dl9" podStartSLOduration=88.232998218 podStartE2EDuration="1m28.232998218s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.232661628 +0000 UTC m=+108.854265758" watchObservedRunningTime="2026-02-02 16:51:57.232998218 +0000 UTC m=+108.854602298" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.254611 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e32245f-01b0-428d-b2c1-62f558c02df9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.254739 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e32245f-01b0-428d-b2c1-62f558c02df9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.254790 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5e32245f-01b0-428d-b2c1-62f558c02df9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.254860 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5e32245f-01b0-428d-b2c1-62f558c02df9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: 
\"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.255004 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5e32245f-01b0-428d-b2c1-62f558c02df9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.301503 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-2qphx" podStartSLOduration=88.30148117 podStartE2EDuration="1m28.30148117s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.301442419 +0000 UTC m=+108.923046559" watchObservedRunningTime="2026-02-02 16:51:57.30148117 +0000 UTC m=+108.923085260" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.331434 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=88.33141457 podStartE2EDuration="1m28.33141457s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.330978867 +0000 UTC m=+108.952582957" watchObservedRunningTime="2026-02-02 16:51:57.33141457 +0000 UTC m=+108.953018660" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.355933 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e32245f-01b0-428d-b2c1-62f558c02df9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.356006 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e32245f-01b0-428d-b2c1-62f558c02df9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.356038 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5e32245f-01b0-428d-b2c1-62f558c02df9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.356070 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5e32245f-01b0-428d-b2c1-62f558c02df9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.356128 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5e32245f-01b0-428d-b2c1-62f558c02df9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.356196 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5e32245f-01b0-428d-b2c1-62f558c02df9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.357411 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5e32245f-01b0-428d-b2c1-62f558c02df9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.358424 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5e32245f-01b0-428d-b2c1-62f558c02df9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.362751 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e32245f-01b0-428d-b2c1-62f558c02df9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.378349 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e32245f-01b0-428d-b2c1-62f558c02df9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-r9cv9\" (UID: \"5e32245f-01b0-428d-b2c1-62f558c02df9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.415554 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-hzst6" podStartSLOduration=88.415526701 podStartE2EDuration="1m28.415526701s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.401714815 +0000 UTC m=+109.023318905" watchObservedRunningTime="2026-02-02 16:51:57.415526701 +0000 UTC m=+109.037130781" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.429605 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=25.429580154 podStartE2EDuration="25.429580154s" podCreationTimestamp="2026-02-02 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.415908282 +0000 UTC m=+109.037512362" watchObservedRunningTime="2026-02-02 16:51:57.429580154 +0000 UTC 
m=+109.051184234" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.455326 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-67xl5" podStartSLOduration=87.45530343 podStartE2EDuration="1m27.45530343s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.442322148 +0000 UTC m=+109.063926248" watchObservedRunningTime="2026-02-02 16:51:57.45530343 +0000 UTC m=+109.076907510" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.483546 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.493740 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=89.493710048 podStartE2EDuration="1m29.493710048s" podCreationTimestamp="2026-02-02 16:50:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.474115433 +0000 UTC m=+109.095719513" watchObservedRunningTime="2026-02-02 16:51:57.493710048 +0000 UTC m=+109.115314158" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.494247 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=53.494232104 podStartE2EDuration="53.494232104s" podCreationTimestamp="2026-02-02 16:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.491977647 +0000 UTC m=+109.113581767" watchObservedRunningTime="2026-02-02 16:51:57.494232104 +0000 UTC m=+109.115836224" Feb 02 16:51:57 crc kubenswrapper[4835]: I0202 16:51:57.542437 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podStartSLOduration=88.542412609 podStartE2EDuration="1m28.542412609s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:57.541248025 +0000 UTC m=+109.162852115" watchObservedRunningTime="2026-02-02 16:51:57.542412609 +0000 UTC m=+109.164016689" Feb 02 16:51:58 crc kubenswrapper[4835]: I0202 16:51:58.188814 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:51:58 crc kubenswrapper[4835]: I0202 16:51:58.188880 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:51:58 crc kubenswrapper[4835]: E0202 16:51:58.188970 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:51:58 crc kubenswrapper[4835]: E0202 16:51:58.189173 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:51:58 crc kubenswrapper[4835]: I0202 16:51:58.503692 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" event={"ID":"5e32245f-01b0-428d-b2c1-62f558c02df9","Type":"ContainerStarted","Data":"ccd9277f72e7743d3ef109f7123e99a2fb4532ac9a11cac5cf47cb6832286140"} Feb 02 16:51:58 crc kubenswrapper[4835]: I0202 16:51:58.503776 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" event={"ID":"5e32245f-01b0-428d-b2c1-62f558c02df9","Type":"ContainerStarted","Data":"492c3afe5f7ff0f2cebf4d930b92bd33967fcbd828fe41e2643b8692c15c0104"} Feb 02 16:51:58 crc kubenswrapper[4835]: I0202 16:51:58.522315 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r9cv9" podStartSLOduration=89.522266679 podStartE2EDuration="1m29.522266679s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:51:58.520952321 +0000 UTC m=+110.142556441" watchObservedRunningTime="2026-02-02 16:51:58.522266679 +0000 UTC m=+110.143870799" Feb 02 16:51:59 crc kubenswrapper[4835]: I0202 16:51:59.188486 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:51:59 crc kubenswrapper[4835]: I0202 16:51:59.188569 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:51:59 crc kubenswrapper[4835]: E0202 16:51:59.189333 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:51:59 crc kubenswrapper[4835]: E0202 16:51:59.189533 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:00 crc kubenswrapper[4835]: I0202 16:52:00.188336 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:00 crc kubenswrapper[4835]: I0202 16:52:00.188387 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:00 crc kubenswrapper[4835]: E0202 16:52:00.188718 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:00 crc kubenswrapper[4835]: E0202 16:52:00.188997 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:01 crc kubenswrapper[4835]: I0202 16:52:01.187919 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:01 crc kubenswrapper[4835]: I0202 16:52:01.187968 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:01 crc kubenswrapper[4835]: E0202 16:52:01.188095 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:01 crc kubenswrapper[4835]: E0202 16:52:01.188170 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:02 crc kubenswrapper[4835]: I0202 16:52:02.189265 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:02 crc kubenswrapper[4835]: I0202 16:52:02.189449 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:02 crc kubenswrapper[4835]: E0202 16:52:02.189566 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:02 crc kubenswrapper[4835]: E0202 16:52:02.189662 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:03 crc kubenswrapper[4835]: I0202 16:52:03.188955 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:03 crc kubenswrapper[4835]: I0202 16:52:03.189007 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:03 crc kubenswrapper[4835]: E0202 16:52:03.189113 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:03 crc kubenswrapper[4835]: E0202 16:52:03.189261 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:04 crc kubenswrapper[4835]: I0202 16:52:04.188237 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:04 crc kubenswrapper[4835]: I0202 16:52:04.188237 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:04 crc kubenswrapper[4835]: E0202 16:52:04.188443 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:04 crc kubenswrapper[4835]: E0202 16:52:04.188547 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:04 crc kubenswrapper[4835]: I0202 16:52:04.527890 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/1.log" Feb 02 16:52:04 crc kubenswrapper[4835]: I0202 16:52:04.528554 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/0.log" Feb 02 16:52:04 crc kubenswrapper[4835]: I0202 16:52:04.528616 4835 generic.go:334] "Generic (PLEG): container finished" podID="92da4528-a699-45b1-aed0-d49a382bf0a1" containerID="372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab" exitCode=1 Feb 02 16:52:04 crc kubenswrapper[4835]: I0202 16:52:04.528728 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hzst6" event={"ID":"92da4528-a699-45b1-aed0-d49a382bf0a1","Type":"ContainerDied","Data":"372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab"} Feb 02 16:52:04 crc kubenswrapper[4835]: I0202 16:52:04.528868 4835 scope.go:117] "RemoveContainer" containerID="ecd8ad0f5fbe9d1c015d663363d87910cf5bd4e139aed1a0500e645c6b76f347" Feb 02 16:52:04 crc kubenswrapper[4835]: I0202 16:52:04.529609 4835 scope.go:117] "RemoveContainer" containerID="372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab" Feb 02 16:52:04 crc kubenswrapper[4835]: E0202 16:52:04.530159 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-hzst6_openshift-multus(92da4528-a699-45b1-aed0-d49a382bf0a1)\"" pod="openshift-multus/multus-hzst6" podUID="92da4528-a699-45b1-aed0-d49a382bf0a1" Feb 02 16:52:05 crc kubenswrapper[4835]: I0202 16:52:05.188003 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:05 crc kubenswrapper[4835]: E0202 16:52:05.188127 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:05 crc kubenswrapper[4835]: I0202 16:52:05.188003 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:05 crc kubenswrapper[4835]: E0202 16:52:05.188255 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:05 crc kubenswrapper[4835]: I0202 16:52:05.188862 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 16:52:05 crc kubenswrapper[4835]: I0202 16:52:05.534082 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/3.log" Feb 02 16:52:05 crc kubenswrapper[4835]: I0202 16:52:05.536735 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerStarted","Data":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} Feb 02 16:52:05 crc kubenswrapper[4835]: I0202 16:52:05.537452 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:52:05 crc kubenswrapper[4835]: I0202 16:52:05.543181 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/1.log" Feb 02 16:52:05 crc kubenswrapper[4835]: I0202 16:52:05.566254 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podStartSLOduration=96.566235801 podStartE2EDuration="1m36.566235801s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:05.565572312 +0000 UTC m=+117.187176452" watchObservedRunningTime="2026-02-02 16:52:05.566235801 +0000 UTC m=+117.187839881" Feb 02 16:52:06 crc kubenswrapper[4835]: I0202 16:52:06.056035 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-fbl8t"] Feb 02 16:52:06 crc kubenswrapper[4835]: I0202 16:52:06.056220 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:06 crc kubenswrapper[4835]: E0202 16:52:06.056374 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:06 crc kubenswrapper[4835]: I0202 16:52:06.188015 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:06 crc kubenswrapper[4835]: E0202 16:52:06.188154 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:06 crc kubenswrapper[4835]: I0202 16:52:06.188231 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:06 crc kubenswrapper[4835]: E0202 16:52:06.188437 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:07 crc kubenswrapper[4835]: I0202 16:52:07.188499 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:07 crc kubenswrapper[4835]: E0202 16:52:07.188632 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:08 crc kubenswrapper[4835]: I0202 16:52:08.187891 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:08 crc kubenswrapper[4835]: E0202 16:52:08.188030 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:08 crc kubenswrapper[4835]: I0202 16:52:08.187888 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:08 crc kubenswrapper[4835]: E0202 16:52:08.188114 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:08 crc kubenswrapper[4835]: I0202 16:52:08.187888 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:08 crc kubenswrapper[4835]: E0202 16:52:08.188187 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:09 crc kubenswrapper[4835]: I0202 16:52:09.188778 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:09 crc kubenswrapper[4835]: E0202 16:52:09.193477 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:09 crc kubenswrapper[4835]: E0202 16:52:09.203238 4835 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Feb 02 16:52:09 crc kubenswrapper[4835]: E0202 16:52:09.296475 4835 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 16:52:10 crc kubenswrapper[4835]: I0202 16:52:10.188391 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:10 crc kubenswrapper[4835]: I0202 16:52:10.188462 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:10 crc kubenswrapper[4835]: I0202 16:52:10.188578 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:10 crc kubenswrapper[4835]: E0202 16:52:10.188688 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:10 crc kubenswrapper[4835]: E0202 16:52:10.188812 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:10 crc kubenswrapper[4835]: E0202 16:52:10.189005 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:11 crc kubenswrapper[4835]: I0202 16:52:11.187974 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:11 crc kubenswrapper[4835]: E0202 16:52:11.188115 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:12 crc kubenswrapper[4835]: I0202 16:52:12.187936 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:12 crc kubenswrapper[4835]: I0202 16:52:12.188013 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:12 crc kubenswrapper[4835]: I0202 16:52:12.188006 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:12 crc kubenswrapper[4835]: E0202 16:52:12.188516 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:12 crc kubenswrapper[4835]: E0202 16:52:12.188936 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:12 crc kubenswrapper[4835]: E0202 16:52:12.189014 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:13 crc kubenswrapper[4835]: I0202 16:52:13.188002 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:13 crc kubenswrapper[4835]: E0202 16:52:13.188878 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:14 crc kubenswrapper[4835]: I0202 16:52:14.188772 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:14 crc kubenswrapper[4835]: I0202 16:52:14.188879 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:14 crc kubenswrapper[4835]: E0202 16:52:14.188920 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:14 crc kubenswrapper[4835]: I0202 16:52:14.188773 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:14 crc kubenswrapper[4835]: E0202 16:52:14.189026 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:14 crc kubenswrapper[4835]: E0202 16:52:14.189211 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:14 crc kubenswrapper[4835]: E0202 16:52:14.297834 4835 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 16:52:15 crc kubenswrapper[4835]: I0202 16:52:15.188432 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:15 crc kubenswrapper[4835]: E0202 16:52:15.188613 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:16 crc kubenswrapper[4835]: I0202 16:52:16.188910 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:16 crc kubenswrapper[4835]: I0202 16:52:16.188986 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:16 crc kubenswrapper[4835]: I0202 16:52:16.189070 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:16 crc kubenswrapper[4835]: E0202 16:52:16.189138 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:16 crc kubenswrapper[4835]: E0202 16:52:16.189314 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:16 crc kubenswrapper[4835]: E0202 16:52:16.189486 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:17 crc kubenswrapper[4835]: I0202 16:52:17.188250 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:17 crc kubenswrapper[4835]: E0202 16:52:17.188521 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:18 crc kubenswrapper[4835]: I0202 16:52:18.188822 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:18 crc kubenswrapper[4835]: E0202 16:52:18.188997 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:18 crc kubenswrapper[4835]: I0202 16:52:18.189229 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:18 crc kubenswrapper[4835]: E0202 16:52:18.189676 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:18 crc kubenswrapper[4835]: I0202 16:52:18.188811 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:18 crc kubenswrapper[4835]: E0202 16:52:18.190027 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:19 crc kubenswrapper[4835]: I0202 16:52:19.188375 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:19 crc kubenswrapper[4835]: E0202 16:52:19.190420 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:19 crc kubenswrapper[4835]: E0202 16:52:19.299064 4835 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 16:52:20 crc kubenswrapper[4835]: I0202 16:52:20.188445 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:20 crc kubenswrapper[4835]: I0202 16:52:20.188604 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:20 crc kubenswrapper[4835]: I0202 16:52:20.188714 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:20 crc kubenswrapper[4835]: E0202 16:52:20.188777 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:20 crc kubenswrapper[4835]: I0202 16:52:20.189006 4835 scope.go:117] "RemoveContainer" containerID="372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab" Feb 02 16:52:20 crc kubenswrapper[4835]: E0202 16:52:20.188990 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:20 crc kubenswrapper[4835]: E0202 16:52:20.189228 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:20 crc kubenswrapper[4835]: I0202 16:52:20.608539 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/1.log" Feb 02 16:52:20 crc kubenswrapper[4835]: I0202 16:52:20.608877 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hzst6" event={"ID":"92da4528-a699-45b1-aed0-d49a382bf0a1","Type":"ContainerStarted","Data":"561a1a9beb47443e82e9257c7fd897da040fa5f33b07d13929eb4206b7e50a75"} Feb 02 16:52:21 crc kubenswrapper[4835]: I0202 16:52:21.188563 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:21 crc kubenswrapper[4835]: E0202 16:52:21.188962 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:22 crc kubenswrapper[4835]: I0202 16:52:22.188822 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:22 crc kubenswrapper[4835]: I0202 16:52:22.188864 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:22 crc kubenswrapper[4835]: I0202 16:52:22.188949 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:22 crc kubenswrapper[4835]: E0202 16:52:22.189010 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:22 crc kubenswrapper[4835]: E0202 16:52:22.189132 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:22 crc kubenswrapper[4835]: E0202 16:52:22.189258 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:23 crc kubenswrapper[4835]: I0202 16:52:23.188572 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:23 crc kubenswrapper[4835]: E0202 16:52:23.188781 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 16:52:24 crc kubenswrapper[4835]: I0202 16:52:24.188602 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:24 crc kubenswrapper[4835]: I0202 16:52:24.188646 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:24 crc kubenswrapper[4835]: I0202 16:52:24.188668 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:24 crc kubenswrapper[4835]: E0202 16:52:24.188813 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 16:52:24 crc kubenswrapper[4835]: E0202 16:52:24.188863 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 16:52:24 crc kubenswrapper[4835]: E0202 16:52:24.188994 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fbl8t" podUID="5f2e42e3-ff22-4273-9a65-d7e55792155e" Feb 02 16:52:25 crc kubenswrapper[4835]: I0202 16:52:25.188016 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:25 crc kubenswrapper[4835]: I0202 16:52:25.191218 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 16:52:25 crc kubenswrapper[4835]: I0202 16:52:25.191597 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 16:52:26 crc kubenswrapper[4835]: I0202 16:52:26.187818 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:26 crc kubenswrapper[4835]: I0202 16:52:26.187899 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:26 crc kubenswrapper[4835]: I0202 16:52:26.187851 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:26 crc kubenswrapper[4835]: I0202 16:52:26.190572 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 16:52:26 crc kubenswrapper[4835]: I0202 16:52:26.190657 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 16:52:26 crc kubenswrapper[4835]: I0202 16:52:26.190721 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 16:52:26 crc kubenswrapper[4835]: I0202 16:52:26.191341 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.789557 4835 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.916519 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-87z74"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.916815 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.916968 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-kk9sb"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.917212 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.918142 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.918633 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.920258 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.920783 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-2n9fx"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.920962 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-44vfm"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.921246 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.921538 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.922095 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.930697 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.932201 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.933216 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.933739 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.934850 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.935285 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.935523 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.936074 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.938495 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.938502 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.938937 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.942890 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.942963 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.943010 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.943199 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.943252 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.943413 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.943548 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.946916 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vk9xf"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.947014 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.962053 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.962403 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.962495 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.962712 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.962828 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.962929 4835 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.963051 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.963159 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.963263 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.963418 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.963527 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.963642 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.963917 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.964021 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.964132 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.964245 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.964709 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.964912 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.965026 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.965866 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.966323 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.966833 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.966888 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.966984 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.967379 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.967390 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.967603 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.967733 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.967618 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.967922 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.968001 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.968071 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.968607 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.970036 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8wthb"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.970407 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.970702 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.970780 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.974894 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.976119 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dg8r2"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.976702 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.977251 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.977340 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.976714 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.978344 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sfbf9"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.978807 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.979494 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wq452"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.980171 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.983602 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.986700 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.986905 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.986907 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.988517 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-xpn8c"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.988905 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt"] Feb 02 16:52:27 crc kubenswrapper[4835]: I0202 16:52:27.989116 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.006993 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.008063 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.008790 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.014188 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.015186 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.015869 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.016877 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.017424 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.018240 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.019038 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.019128 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.020956 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.021018 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.021345 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.021401 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.021744 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-smljg"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.021850 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.022003 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.022882 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.028173 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.028515 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.032805 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.036026 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.037011 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.037308 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.037391 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.038413 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.038575 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.038946 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.039064 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.039287 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.039399 4835 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.039497 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.039681 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.039770 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.039943 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.040081 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.040119 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.040317 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.040682 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.040860 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.040992 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041031 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.038570 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dd17476b-94eb-4998-8477-50763c1d1222-etcd-client\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041109 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-etcd-service-ca\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041146 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041154 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.040085 4835 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041242 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041370 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041400 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041146 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-etcd-ca\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041451 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041481 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-serving-cert\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041502 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041505 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqjgr\" (UniqueName: \"kubernetes.io/projected/636b53c1-5764-44a8-99e4-e0b461c55943-kube-api-access-fqjgr\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041526 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5478630-cfe1-49d8-b597-7c2c9afec10f-serving-cert\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041545 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c66e4654-4ffd-4a39-8e1e-34d979249c94-config\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041567 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a130c6a7-c518-4407-82b0-eb291617a482-available-featuregates\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041586 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-encryption-config\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041608 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-audit-dir\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041630 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-config\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041651 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/99f30a12-2016-4dee-a4cc-02699b58d1a6-metrics-tls\") pod \"dns-operator-744455d44c-8wthb\" (UID: \"99f30a12-2016-4dee-a4cc-02699b58d1a6\") " pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041666 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pknk5\" (UniqueName: \"kubernetes.io/projected/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-kube-api-access-pknk5\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041691 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041708 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-config\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041725 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8386d926-f309-4b23-aa26-1bd47506682f-config\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: \"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041743 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-config\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041573 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041808 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041588 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041803 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd17476b-94eb-4998-8477-50763c1d1222-serving-cert\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041712 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041944 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jfvj\" (UniqueName: \"kubernetes.io/projected/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-kube-api-access-7jfvj\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041976 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgds4\" (UniqueName: \"kubernetes.io/projected/dd17476b-94eb-4998-8477-50763c1d1222-kube-api-access-bgds4\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.041996 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c66e4654-4ffd-4a39-8e1e-34d979249c94-auth-proxy-config\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042013 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-etcd-client\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " 
pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042042 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drdtc\" (UniqueName: \"kubernetes.io/projected/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-kube-api-access-drdtc\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042058 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042073 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-node-pullsecrets\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042092 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/636b53c1-5764-44a8-99e4-e0b461c55943-config\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042129 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-audit\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042162 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-image-import-ca\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042184 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wd9q\" (UniqueName: \"kubernetes.io/projected/a130c6a7-c518-4407-82b0-eb291617a482-kube-api-access-8wd9q\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042207 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-serving-cert\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 
16:52:28.042226 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmpps\" (UniqueName: \"kubernetes.io/projected/e5478630-cfe1-49d8-b597-7c2c9afec10f-kube-api-access-xmpps\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042247 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-etcd-serving-ca\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042287 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/636b53c1-5764-44a8-99e4-e0b461c55943-trusted-ca\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042311 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxsw5\" (UniqueName: \"kubernetes.io/projected/99f30a12-2016-4dee-a4cc-02699b58d1a6-kube-api-access-cxsw5\") pod \"dns-operator-744455d44c-8wthb\" (UID: \"99f30a12-2016-4dee-a4cc-02699b58d1a6\") " pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042293 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042346 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/636b53c1-5764-44a8-99e4-e0b461c55943-serving-cert\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042379 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8386d926-f309-4b23-aa26-1bd47506682f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: \"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042409 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-images\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042478 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-client-ca\") pod \"controller-manager-879f6c89f-87z74\" (UID: 
\"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042531 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-service-ca-bundle\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042557 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042575 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bwcc\" (UniqueName: \"kubernetes.io/projected/c66e4654-4ffd-4a39-8e1e-34d979249c94-kube-api-access-5bwcc\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042591 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042594 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-config\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042834 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-config\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042850 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c66e4654-4ffd-4a39-8e1e-34d979249c94-machine-approver-tls\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042873 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8386d926-f309-4b23-aa26-1bd47506682f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: \"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.042889 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a130c6a7-c518-4407-82b0-eb291617a482-serving-cert\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.043092 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.043324 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.044528 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.045654 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.051828 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.052655 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.052875 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.053521 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-tqhv4"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.053992 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cs52k"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.054660 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.055495 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.055615 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.055866 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-tqhv4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.056000 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.056167 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.056397 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.059123 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.062448 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.062966 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-bbqwj"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.063388 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.063843 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.063958 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.064112 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.064154 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.067348 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.068008 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.075081 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.076923 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.080335 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.081750 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.101384 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.103856 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5vz8"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.105017 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.105094 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.105023 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.106864 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2pghx"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.106935 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.107371 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.107397 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-44vfm"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.107413 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-85m82"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.107583 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.108319 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-87z74"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.108341 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8wthb"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.108351 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-kk9sb"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.108367 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.108376 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vk9xf"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.108385 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sfbf9"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.108395 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.108491 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.110175 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dg8r2"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.112698 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.120653 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.122364 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.123378 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.124103 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.125104 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.129139 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-smljg"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.131665 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.138159 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.139014 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.139209 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-7q9qk"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.140108 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-7q9qk" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.142569 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cs52k"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143398 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a130c6a7-c518-4407-82b0-eb291617a482-available-featuregates\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143433 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-config\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143454 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-encryption-config\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143470 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-audit-dir\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143485 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/99f30a12-2016-4dee-a4cc-02699b58d1a6-metrics-tls\") pod \"dns-operator-744455d44c-8wthb\" (UID: \"99f30a12-2016-4dee-a4cc-02699b58d1a6\") " pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143500 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143517 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pknk5\" (UniqueName: \"kubernetes.io/projected/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-kube-api-access-pknk5\") pod 
\"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143535 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-config\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143552 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8386d926-f309-4b23-aa26-1bd47506682f-config\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: \"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143567 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd17476b-94eb-4998-8477-50763c1d1222-serving-cert\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143583 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-config\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143607 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-etcd-client\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143622 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jfvj\" (UniqueName: \"kubernetes.io/projected/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-kube-api-access-7jfvj\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143638 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgds4\" (UniqueName: \"kubernetes.io/projected/dd17476b-94eb-4998-8477-50763c1d1222-kube-api-access-bgds4\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143652 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c66e4654-4ffd-4a39-8e1e-34d979249c94-auth-proxy-config\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143667 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-drdtc\" (UniqueName: \"kubernetes.io/projected/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-kube-api-access-drdtc\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143682 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143699 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/636b53c1-5764-44a8-99e4-e0b461c55943-config\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143715 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-node-pullsecrets\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143742 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-audit\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143758 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-image-import-ca\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143773 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-serving-cert\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143788 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmpps\" (UniqueName: \"kubernetes.io/projected/e5478630-cfe1-49d8-b597-7c2c9afec10f-kube-api-access-xmpps\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143803 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-etcd-serving-ca\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") 
" pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143829 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wd9q\" (UniqueName: \"kubernetes.io/projected/a130c6a7-c518-4407-82b0-eb291617a482-kube-api-access-8wd9q\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143857 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/636b53c1-5764-44a8-99e4-e0b461c55943-serving-cert\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143879 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/636b53c1-5764-44a8-99e4-e0b461c55943-trusted-ca\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143897 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxsw5\" (UniqueName: \"kubernetes.io/projected/99f30a12-2016-4dee-a4cc-02699b58d1a6-kube-api-access-cxsw5\") pod \"dns-operator-744455d44c-8wthb\" (UID: \"99f30a12-2016-4dee-a4cc-02699b58d1a6\") " pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143929 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8386d926-f309-4b23-aa26-1bd47506682f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: \"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143949 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-images\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143970 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-client-ca\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.143996 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-service-ca-bundle\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144015 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144036 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-config\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144057 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bwcc\" (UniqueName: \"kubernetes.io/projected/c66e4654-4ffd-4a39-8e1e-34d979249c94-kube-api-access-5bwcc\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144079 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-config\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144098 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c66e4654-4ffd-4a39-8e1e-34d979249c94-machine-approver-tls\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144116 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8386d926-f309-4b23-aa26-1bd47506682f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: \"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144135 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a130c6a7-c518-4407-82b0-eb291617a482-serving-cert\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144155 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dd17476b-94eb-4998-8477-50763c1d1222-etcd-client\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144174 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-etcd-service-ca\") pod 
\"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144198 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-etcd-ca\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144218 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144238 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-serving-cert\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144260 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5478630-cfe1-49d8-b597-7c2c9afec10f-serving-cert\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144323 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqjgr\" (UniqueName: \"kubernetes.io/projected/636b53c1-5764-44a8-99e4-e0b461c55943-kube-api-access-fqjgr\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144346 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c66e4654-4ffd-4a39-8e1e-34d979249c94-config\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.144888 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c66e4654-4ffd-4a39-8e1e-34d979249c94-config\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.145188 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a130c6a7-c518-4407-82b0-eb291617a482-available-featuregates\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.145909 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-config\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.146024 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-audit-dir\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.148088 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-client-ca\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.148529 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/636b53c1-5764-44a8-99e4-e0b461c55943-trusted-ca\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.148533 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c66e4654-4ffd-4a39-8e1e-34d979249c94-auth-proxy-config\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.149062 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-images\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.149065 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-config\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.149685 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-etcd-service-ca\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.150065 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 
02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.150088 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-config\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.150551 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-service-ca-bundle\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.150560 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-config\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.151442 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-node-pullsecrets\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.151725 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8386d926-f309-4b23-aa26-1bd47506682f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: \"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.151778 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/636b53c1-5764-44a8-99e4-e0b461c55943-config\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.151956 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/dd17476b-94eb-4998-8477-50763c1d1222-etcd-ca\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.152320 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5478630-cfe1-49d8-b597-7c2c9afec10f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.152638 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8386d926-f309-4b23-aa26-1bd47506682f-config\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: 
\"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.153965 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dd17476b-94eb-4998-8477-50763c1d1222-etcd-client\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.154314 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/636b53c1-5764-44a8-99e4-e0b461c55943-serving-cert\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.154492 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5478630-cfe1-49d8-b597-7c2c9afec10f-serving-cert\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.154858 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c66e4654-4ffd-4a39-8e1e-34d979249c94-machine-approver-tls\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.155486 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-serving-cert\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.155868 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd17476b-94eb-4998-8477-50763c1d1222-serving-cert\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.156643 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a130c6a7-c518-4407-82b0-eb291617a482-serving-cert\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.160322 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.169297 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/99f30a12-2016-4dee-a4cc-02699b58d1a6-metrics-tls\") pod \"dns-operator-744455d44c-8wthb\" (UID: \"99f30a12-2016-4dee-a4cc-02699b58d1a6\") " pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" Feb 
02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.169760 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.170681 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.172015 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wq452"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.174481 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.174514 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.174523 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-tqhv4"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.176298 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xpn8c"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.177327 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.178674 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.179421 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.179831 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.181288 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-2n9fx"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.182954 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.184107 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2pghx"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.186112 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5vz8"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.186140 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-7q9qk"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.186999 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.187974 4835 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.189561 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-7mhmj"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.190592 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.190967 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wq59n"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.193197 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.193302 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.193558 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-85m82"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.194420 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wq59n"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.195376 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-9rdhl"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.196399 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9rdhl"] Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.196456 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.199587 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.202158 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-etcd-serving-ca\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.220256 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.228603 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-encryption-config\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.239689 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.259484 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.272767 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-etcd-client\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.279657 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.282115 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-image-import-ca\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.307580 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.309720 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.320041 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.339880 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.360109 4835 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.362790 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-config\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.379447 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.389594 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-audit\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.400618 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.406395 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-serving-cert\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.420355 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.440762 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.460558 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.481371 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.501025 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.519975 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548563 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tvbq\" (UniqueName: \"kubernetes.io/projected/ea193cb5-8e86-4628-a115-16a3987f4eaf-kube-api-access-9tvbq\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548614 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msbgc\" (UniqueName: \"kubernetes.io/projected/2affec3b-57ec-4308-8c4f-6c5b6f94e541-kube-api-access-msbgc\") pod \"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548647 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548679 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548773 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548859 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548898 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-dir\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548940 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-audit-dir\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.548960 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-client-ca\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549045 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: 
\"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549088 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-tls\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549107 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549135 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549152 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549173 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hcjb\" (UniqueName: \"kubernetes.io/projected/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-kube-api-access-2hcjb\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549223 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-audit-policies\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549263 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbt6z\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-kube-api-access-pbt6z\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549310 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-etcd-client\") pod \"apiserver-7bbb656c7d-s2f6g\" 
(UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549334 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-serving-cert\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549361 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549379 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-config\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549403 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549438 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549455 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549472 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-bound-sa-token\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549491 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2affec3b-57ec-4308-8c4f-6c5b6f94e541-serving-cert\") pod 
\"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549508 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msbtj\" (UniqueName: \"kubernetes.io/projected/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-kube-api-access-msbtj\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549522 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549540 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-encryption-config\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549579 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549603 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549628 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-certificates\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549648 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-serving-cert\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549666 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549695 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2affec3b-57ec-4308-8c4f-6c5b6f94e541-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549760 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-trusted-ca\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.549785 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-policies\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.550404 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.050386454 +0000 UTC m=+140.671990534 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.579776 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.599675 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.619397 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.649949 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650145 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650247 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650310 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-config\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.650360 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.150341921 +0000 UTC m=+140.771946001 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650428 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrrrj\" (UniqueName: \"kubernetes.io/projected/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-kube-api-access-qrrrj\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650455 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5213a65c-70c8-4bd5-8f3d-071943782bbf-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650501 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-metrics-certs\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650523 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-trusted-ca-bundle\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650545 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-serving-cert\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650627 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-csi-data-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650692 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg8gs\" (UniqueName: \"kubernetes.io/projected/aba12881-8bed-4d65-9f58-71b9e848a1cb-kube-api-access-jg8gs\") pod \"machine-config-controller-84d6567774-jsn2h\" (UID: \"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:28 
crc kubenswrapper[4835]: I0202 16:52:28.650752 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650826 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw48q\" (UniqueName: \"kubernetes.io/projected/6b160815-f795-4fb0-8f99-f6b3086709a5-kube-api-access-dw48q\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650865 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk4cn\" (UniqueName: \"kubernetes.io/projected/8951d83b-fadf-4d39-b1b9-9012f84066c2-kube-api-access-jk4cn\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650925 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2affec3b-57ec-4308-8c4f-6c5b6f94e541-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.650977 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dd8b41f5-efda-47ad-83ef-891bcf11fc70-srv-cert\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651015 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651050 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-apiservice-cert\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651095 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651142 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651178 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-encryption-config\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651212 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e69421a3-e42a-4e87-a164-486a774179ff-metrics-tls\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651348 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651387 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e69421a3-e42a-4e87-a164-486a774179ff-trusted-ca\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651445 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aba12881-8bed-4d65-9f58-71b9e848a1cb-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jsn2h\" (UID: \"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651507 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-certificates\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651538 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651572 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651621 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-serving-cert\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651649 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651666 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsfz2\" (UniqueName: \"kubernetes.io/projected/acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f-kube-api-access-nsfz2\") pod \"control-plane-machine-set-operator-78cbb6b69f-txgvr\" (UID: \"acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651705 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d67474c-95af-464f-b92a-4f2bc00dd1fd-secret-volume\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651743 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32591974-ca1a-4284-a32d-eb3eb856a9d5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651786 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/42bd3315-fcd5-4c23-ada1-0e35123d3b7c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-zsx6k\" (UID: \"42bd3315-fcd5-4c23-ada1-0e35123d3b7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651850 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l48s9\" (UniqueName: \"kubernetes.io/projected/09748673-fe28-490d-84f3-7c0170319531-kube-api-access-l48s9\") pod 
\"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651882 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-images\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651917 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w725d\" (UniqueName: \"kubernetes.io/projected/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-kube-api-access-w725d\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651946 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/09748673-fe28-490d-84f3-7c0170319531-profile-collector-cert\") pod \"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.651982 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dd8b41f5-efda-47ad-83ef-891bcf11fc70-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652023 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-serving-cert\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652089 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/41d9518a-1700-401e-96bd-3e855533c109-signing-cabundle\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652107 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652130 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d67474c-95af-464f-b92a-4f2bc00dd1fd-config-volume\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652166 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-node-bootstrap-token\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652214 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7r96\" (UniqueName: \"kubernetes.io/projected/75c4b353-ddb5-4709-ab21-94059ac83671-kube-api-access-g7r96\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652249 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652322 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652370 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5213a65c-70c8-4bd5-8f3d-071943782bbf-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652406 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgt2v\" (UniqueName: \"kubernetes.io/projected/e69421a3-e42a-4e87-a164-486a774179ff-kube-api-access-bgt2v\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652494 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/75c4b353-ddb5-4709-ab21-94059ac83671-service-ca-bundle\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652552 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp7nq\" (UniqueName: \"kubernetes.io/projected/41d9518a-1700-401e-96bd-3e855533c109-kube-api-access-tp7nq\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652598 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-default-certificate\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652645 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-mountpoint-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652760 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8951d83b-fadf-4d39-b1b9-9012f84066c2-config\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652807 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qtsx\" (UniqueName: \"kubernetes.io/projected/73c03460-5e98-4103-9da2-de2cf1c391f5-kube-api-access-8qtsx\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652846 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/41d9518a-1700-401e-96bd-3e855533c109-signing-key\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652879 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-tmpfs\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.652981 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2b288d3c-3c0e-4290-819a-d5c1e7eedcf0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cs52k\" (UID: \"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653064 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-config\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653100 4835 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd6cd\" (UniqueName: \"kubernetes.io/projected/dd8b41f5-efda-47ad-83ef-891bcf11fc70-kube-api-access-dd6cd\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653135 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-audit-policies\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653168 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653204 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hcjb\" (UniqueName: \"kubernetes.io/projected/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-kube-api-access-2hcjb\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653241 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-service-ca\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653299 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbt6z\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-kube-api-access-pbt6z\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653335 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd6r8\" (UniqueName: \"kubernetes.io/projected/20b9b4db-66e7-4732-986a-5b766e8bc6fb-kube-api-access-sd6r8\") pod \"cluster-samples-operator-665b6dd947-nwzzt\" (UID: \"20b9b4db-66e7-4732-986a-5b766e8bc6fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653446 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-etcd-client\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653482 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-certs\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653517 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-stats-auth\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653549 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32591974-ca1a-4284-a32d-eb3eb856a9d5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653581 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-oauth-config\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653619 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653652 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-config\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.653671 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.153646968 +0000 UTC m=+140.775251088 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653779 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj6ls\" (UniqueName: \"kubernetes.io/projected/2b288d3c-3c0e-4290-819a-d5c1e7eedcf0-kube-api-access-dj6ls\") pod \"multus-admission-controller-857f4d67dd-cs52k\" (UID: \"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653829 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b160815-f795-4fb0-8f99-f6b3086709a5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653868 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msbtj\" (UniqueName: \"kubernetes.io/projected/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-kube-api-access-msbtj\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653904 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.653946 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-plugins-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654149 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-certificates\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654426 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/09748673-fe28-490d-84f3-7c0170319531-srv-cert\") pod \"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:28 crc 
kubenswrapper[4835]: I0202 16:52:28.654482 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-bound-sa-token\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654510 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84mrm\" (UniqueName: \"kubernetes.io/projected/7b9bef11-2ef9-4bad-b548-c86f910ce019-kube-api-access-84mrm\") pod \"ingress-canary-7q9qk\" (UID: \"7b9bef11-2ef9-4bad-b548-c86f910ce019\") " pod="openshift-ingress-canary/ingress-canary-7q9qk" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654537 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdcj9\" (UniqueName: \"kubernetes.io/projected/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-kube-api-access-vdcj9\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654559 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8951d83b-fadf-4d39-b1b9-9012f84066c2-serving-cert\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654623 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5bcz\" (UniqueName: \"kubernetes.io/projected/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-kube-api-access-n5bcz\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654680 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vd9f\" (UniqueName: \"kubernetes.io/projected/9e36008e-f103-4e5a-9543-6d3ef330d446-kube-api-access-8vd9f\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654708 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-proxy-tls\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654730 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hprv6\" (UniqueName: \"kubernetes.io/projected/0d67474c-95af-464f-b92a-4f2bc00dd1fd-kube-api-access-hprv6\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654771 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbz4x\" (UniqueName: \"kubernetes.io/projected/5213a65c-70c8-4bd5-8f3d-071943782bbf-kube-api-access-kbz4x\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654814 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.654848 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.655073 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-serving-cert\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.655451 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.655784 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2affec3b-57ec-4308-8c4f-6c5b6f94e541-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.655863 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2affec3b-57ec-4308-8c4f-6c5b6f94e541-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.655896 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-txgvr\" (UID: \"acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.655922 
4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpcpc\" (UniqueName: \"kubernetes.io/projected/1bfd176d-b2cc-45f8-a80d-61e391f25163-kube-api-access-kpcpc\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.655947 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-oauth-serving-cert\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.656069 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.656404 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.656805 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-trusted-ca\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.656919 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-webhook-cert\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.656980 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-policies\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657056 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e69421a3-e42a-4e87-a164-486a774179ff-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657139 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msbgc\" (UniqueName: 
\"kubernetes.io/projected/2affec3b-57ec-4308-8c4f-6c5b6f94e541-kube-api-access-msbgc\") pod \"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657152 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-serving-cert\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657255 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657319 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tvbq\" (UniqueName: \"kubernetes.io/projected/ea193cb5-8e86-4628-a115-16a3987f4eaf-kube-api-access-9tvbq\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657348 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw2qb\" (UniqueName: \"kubernetes.io/projected/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-kube-api-access-tw2qb\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657395 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-socket-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657417 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlg74\" (UniqueName: \"kubernetes.io/projected/42bd3315-fcd5-4c23-ada1-0e35123d3b7c-kube-api-access-dlg74\") pod \"package-server-manager-789f6589d5-zsx6k\" (UID: \"42bd3315-fcd5-4c23-ada1-0e35123d3b7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657463 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2affec3b-57ec-4308-8c4f-6c5b6f94e541-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657487 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657560 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657585 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj959\" (UniqueName: \"kubernetes.io/projected/6b48c81d-0f0b-497d-83c4-8f495a859829-kube-api-access-tj959\") pod \"migrator-59844c95c7-wvnqh\" (UID: \"6b48c81d-0f0b-497d-83c4-8f495a859829\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657633 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-dir\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657660 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b160815-f795-4fb0-8f99-f6b3086709a5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657682 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32591974-ca1a-4284-a32d-eb3eb856a9d5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657756 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/20b9b4db-66e7-4732-986a-5b766e8bc6fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nwzzt\" (UID: \"20b9b4db-66e7-4732-986a-5b766e8bc6fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657800 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-config-volume\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657822 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-metrics-tls\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657887 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-audit-dir\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.657911 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-client-ca\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.658047 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.658384 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-config\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.658636 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7b9bef11-2ef9-4bad-b548-c86f910ce019-cert\") pod \"ingress-canary-7q9qk\" (UID: \"7b9bef11-2ef9-4bad-b548-c86f910ce019\") " pod="openshift-ingress-canary/ingress-canary-7q9qk" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.658735 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.658762 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkxz5\" (UniqueName: \"kubernetes.io/projected/2670e3af-3faf-4aa2-8674-ad7b94955ef0-kube-api-access-wkxz5\") pod \"downloads-7954f5f757-tqhv4\" (UID: \"2670e3af-3faf-4aa2-8674-ad7b94955ef0\") " pod="openshift-console/downloads-7954f5f757-tqhv4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.658852 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-tls\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: 
I0202 16:52:28.658901 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-registration-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.658925 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.658977 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.659000 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.659017 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.659055 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aba12881-8bed-4d65-9f58-71b9e848a1cb-proxy-tls\") pod \"machine-config-controller-84d6567774-jsn2h\" (UID: \"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.659074 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.659165 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc 
kubenswrapper[4835]: I0202 16:52:28.659179 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.659479 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-audit-policies\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.659774 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-policies\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.660532 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.660558 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-client-ca\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.660980 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.660983 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-audit-dir\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.661564 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-dir\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.663505 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.663523 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-etcd-client\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.664403 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.664570 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-trusted-ca\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.665403 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.665798 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.667858 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-encryption-config\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.669168 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.672366 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-tls\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.674325 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.679771 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.699809 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.720004 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.739874 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.759977 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.760563 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.260532308 +0000 UTC m=+140.882136388 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.760983 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761256 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-webhook-cert\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761310 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e69421a3-e42a-4e87-a164-486a774179ff-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761346 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw2qb\" (UniqueName: 
\"kubernetes.io/projected/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-kube-api-access-tw2qb\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761377 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761402 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-socket-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761424 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlg74\" (UniqueName: \"kubernetes.io/projected/42bd3315-fcd5-4c23-ada1-0e35123d3b7c-kube-api-access-dlg74\") pod \"package-server-manager-789f6589d5-zsx6k\" (UID: \"42bd3315-fcd5-4c23-ada1-0e35123d3b7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761445 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj959\" (UniqueName: \"kubernetes.io/projected/6b48c81d-0f0b-497d-83c4-8f495a859829-kube-api-access-tj959\") pod \"migrator-59844c95c7-wvnqh\" (UID: \"6b48c81d-0f0b-497d-83c4-8f495a859829\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761475 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b160815-f795-4fb0-8f99-f6b3086709a5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761496 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32591974-ca1a-4284-a32d-eb3eb856a9d5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761519 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/20b9b4db-66e7-4732-986a-5b766e8bc6fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nwzzt\" (UID: \"20b9b4db-66e7-4732-986a-5b766e8bc6fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761540 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-config-volume\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761558 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-metrics-tls\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761578 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7b9bef11-2ef9-4bad-b548-c86f910ce019-cert\") pod \"ingress-canary-7q9qk\" (UID: \"7b9bef11-2ef9-4bad-b548-c86f910ce019\") " pod="openshift-ingress-canary/ingress-canary-7q9qk" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761603 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkxz5\" (UniqueName: \"kubernetes.io/projected/2670e3af-3faf-4aa2-8674-ad7b94955ef0-kube-api-access-wkxz5\") pod \"downloads-7954f5f757-tqhv4\" (UID: \"2670e3af-3faf-4aa2-8674-ad7b94955ef0\") " pod="openshift-console/downloads-7954f5f757-tqhv4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761633 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-registration-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761657 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761679 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761706 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761727 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aba12881-8bed-4d65-9f58-71b9e848a1cb-proxy-tls\") pod \"machine-config-controller-84d6567774-jsn2h\" (UID: \"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761741 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-socket-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761750 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761812 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrrrj\" (UniqueName: \"kubernetes.io/projected/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-kube-api-access-qrrrj\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761837 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-config\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761863 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5213a65c-70c8-4bd5-8f3d-071943782bbf-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761883 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-trusted-ca-bundle\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761899 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-metrics-certs\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761916 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-csi-data-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761939 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg8gs\" (UniqueName: \"kubernetes.io/projected/aba12881-8bed-4d65-9f58-71b9e848a1cb-kube-api-access-jg8gs\") pod \"machine-config-controller-84d6567774-jsn2h\" (UID: 
\"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761961 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw48q\" (UniqueName: \"kubernetes.io/projected/6b160815-f795-4fb0-8f99-f6b3086709a5-kube-api-access-dw48q\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.761986 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk4cn\" (UniqueName: \"kubernetes.io/projected/8951d83b-fadf-4d39-b1b9-9012f84066c2-kube-api-access-jk4cn\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762008 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dd8b41f5-efda-47ad-83ef-891bcf11fc70-srv-cert\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762023 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762041 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-apiservice-cert\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762061 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e69421a3-e42a-4e87-a164-486a774179ff-metrics-tls\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762086 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762104 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e69421a3-e42a-4e87-a164-486a774179ff-trusted-ca\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762128 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aba12881-8bed-4d65-9f58-71b9e848a1cb-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jsn2h\" (UID: \"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762145 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762178 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32591974-ca1a-4284-a32d-eb3eb856a9d5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762196 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsfz2\" (UniqueName: \"kubernetes.io/projected/acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f-kube-api-access-nsfz2\") pod \"control-plane-machine-set-operator-78cbb6b69f-txgvr\" (UID: \"acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762210 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d67474c-95af-464f-b92a-4f2bc00dd1fd-secret-volume\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762244 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/42bd3315-fcd5-4c23-ada1-0e35123d3b7c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-zsx6k\" (UID: \"42bd3315-fcd5-4c23-ada1-0e35123d3b7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762264 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l48s9\" (UniqueName: \"kubernetes.io/projected/09748673-fe28-490d-84f3-7c0170319531-kube-api-access-l48s9\") pod \"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762294 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-images\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762308 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/09748673-fe28-490d-84f3-7c0170319531-profile-collector-cert\") pod \"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762323 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dd8b41f5-efda-47ad-83ef-891bcf11fc70-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762330 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762338 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w725d\" (UniqueName: \"kubernetes.io/projected/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-kube-api-access-w725d\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762389 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-serving-cert\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762417 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/41d9518a-1700-401e-96bd-3e855533c109-signing-cabundle\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762435 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d67474c-95af-464f-b92a-4f2bc00dd1fd-config-volume\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762454 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-node-bootstrap-token\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762480 4835 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-g7r96\" (UniqueName: \"kubernetes.io/projected/75c4b353-ddb5-4709-ab21-94059ac83671-kube-api-access-g7r96\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762502 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5213a65c-70c8-4bd5-8f3d-071943782bbf-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762524 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgt2v\" (UniqueName: \"kubernetes.io/projected/e69421a3-e42a-4e87-a164-486a774179ff-kube-api-access-bgt2v\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762543 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp7nq\" (UniqueName: \"kubernetes.io/projected/41d9518a-1700-401e-96bd-3e855533c109-kube-api-access-tp7nq\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762560 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-default-certificate\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762577 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/75c4b353-ddb5-4709-ab21-94059ac83671-service-ca-bundle\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762598 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-mountpoint-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762616 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8951d83b-fadf-4d39-b1b9-9012f84066c2-config\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762633 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qtsx\" (UniqueName: \"kubernetes.io/projected/73c03460-5e98-4103-9da2-de2cf1c391f5-kube-api-access-8qtsx\") pod \"machine-config-server-7mhmj\" (UID: 
\"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762649 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/41d9518a-1700-401e-96bd-3e855533c109-signing-key\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762664 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-tmpfs\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762679 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-config\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762694 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2b288d3c-3c0e-4290-819a-d5c1e7eedcf0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cs52k\" (UID: \"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762715 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd6cd\" (UniqueName: \"kubernetes.io/projected/dd8b41f5-efda-47ad-83ef-891bcf11fc70-kube-api-access-dd6cd\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762733 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-service-ca\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762750 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd6r8\" (UniqueName: \"kubernetes.io/projected/20b9b4db-66e7-4732-986a-5b766e8bc6fb-kube-api-access-sd6r8\") pod \"cluster-samples-operator-665b6dd947-nwzzt\" (UID: \"20b9b4db-66e7-4732-986a-5b766e8bc6fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762774 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-stats-auth\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.762807 4835 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.262783924 +0000 UTC m=+140.884388044 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762866 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-registration-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762873 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-certs\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762937 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-oauth-config\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.762986 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32591974-ca1a-4284-a32d-eb3eb856a9d5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763049 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b160815-f795-4fb0-8f99-f6b3086709a5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763065 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-config\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763088 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj6ls\" (UniqueName: \"kubernetes.io/projected/2b288d3c-3c0e-4290-819a-d5c1e7eedcf0-kube-api-access-dj6ls\") pod \"multus-admission-controller-857f4d67dd-cs52k\" (UID: \"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763143 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-plugins-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763188 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/09748673-fe28-490d-84f3-7c0170319531-srv-cert\") pod \"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763234 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84mrm\" (UniqueName: \"kubernetes.io/projected/7b9bef11-2ef9-4bad-b548-c86f910ce019-kube-api-access-84mrm\") pod \"ingress-canary-7q9qk\" (UID: \"7b9bef11-2ef9-4bad-b548-c86f910ce019\") " pod="openshift-ingress-canary/ingress-canary-7q9qk" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763312 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdcj9\" (UniqueName: \"kubernetes.io/projected/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-kube-api-access-vdcj9\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763359 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8951d83b-fadf-4d39-b1b9-9012f84066c2-serving-cert\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763404 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5bcz\" (UniqueName: \"kubernetes.io/projected/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-kube-api-access-n5bcz\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763461 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vd9f\" (UniqueName: \"kubernetes.io/projected/9e36008e-f103-4e5a-9543-6d3ef330d446-kube-api-access-8vd9f\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763516 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-proxy-tls\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763564 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbz4x\" (UniqueName: 
\"kubernetes.io/projected/5213a65c-70c8-4bd5-8f3d-071943782bbf-kube-api-access-kbz4x\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763614 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hprv6\" (UniqueName: \"kubernetes.io/projected/0d67474c-95af-464f-b92a-4f2bc00dd1fd-kube-api-access-hprv6\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763666 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763724 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-txgvr\" (UID: \"acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763780 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpcpc\" (UniqueName: \"kubernetes.io/projected/1bfd176d-b2cc-45f8-a80d-61e391f25163-kube-api-access-kpcpc\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763830 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-oauth-serving-cert\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.764084 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e69421a3-e42a-4e87-a164-486a774179ff-trusted-ca\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.764573 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-csi-data-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.764599 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aba12881-8bed-4d65-9f58-71b9e848a1cb-mcc-auth-proxy-config\") pod 
\"machine-config-controller-84d6567774-jsn2h\" (UID: \"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.764691 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-trusted-ca-bundle\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.764792 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.764802 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-images\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.764892 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-mountpoint-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.765100 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/9e36008e-f103-4e5a-9543-6d3ef330d446-plugins-dir\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.763730 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5213a65c-70c8-4bd5-8f3d-071943782bbf-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.765857 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-tmpfs\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.765859 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/20b9b4db-66e7-4732-986a-5b766e8bc6fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nwzzt\" (UID: \"20b9b4db-66e7-4732-986a-5b766e8bc6fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.766010 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-service-ca\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.766013 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-config\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.766567 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.767440 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-txgvr\" (UID: \"acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.767580 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-oauth-serving-cert\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.767639 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-proxy-tls\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.768129 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e69421a3-e42a-4e87-a164-486a774179ff-metrics-tls\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.768714 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5213a65c-70c8-4bd5-8f3d-071943782bbf-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.770067 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-serving-cert\") pod 
\"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.770392 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.772040 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-oauth-config\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.779795 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.799264 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.810140 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32591974-ca1a-4284-a32d-eb3eb856a9d5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.819937 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.823466 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b160815-f795-4fb0-8f99-f6b3086709a5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.839580 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.846725 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32591974-ca1a-4284-a32d-eb3eb856a9d5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.860829 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.864515 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.864644 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.364622036 +0000 UTC m=+140.986226116 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.864787 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.865446 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.36543671 +0000 UTC m=+140.987040870 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.879470 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.888567 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b160815-f795-4fb0-8f99-f6b3086709a5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.900466 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.920756 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.940462 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.960148 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.966844 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.966983 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.466964914 +0000 UTC m=+141.088568994 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.967702 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:28 crc kubenswrapper[4835]: E0202 16:52:28.968059 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.468021775 +0000 UTC m=+141.089625965 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.969136 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2b288d3c-3c0e-4290-819a-d5c1e7eedcf0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cs52k\" (UID: \"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.979697 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 16:52:28 crc kubenswrapper[4835]: I0202 16:52:28.999569 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.020214 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.041338 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.048475 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d67474c-95af-464f-b92a-4f2bc00dd1fd-secret-volume\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.049554 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/dd8b41f5-efda-47ad-83ef-891bcf11fc70-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.050183 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/09748673-fe28-490d-84f3-7c0170319531-profile-collector-cert\") pod \"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.060897 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.069405 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.069657 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.5696152 +0000 UTC m=+141.191219280 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.070112 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.070599 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.570586318 +0000 UTC m=+141.192190398 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.077939 4835 request.go:700] Waited for 1.013662983s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dolm-operator-serving-cert&limit=500&resourceVersion=0 Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.079116 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.086928 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dd8b41f5-efda-47ad-83ef-891bcf11fc70-srv-cert\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.100021 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.120219 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.139999 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.163657 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.172109 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.172714 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.672626306 +0000 UTC m=+141.294230436 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.173008 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.173706 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.673683537 +0000 UTC m=+141.295287657 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.180708 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.200798 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.211644 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-default-certificate\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.219833 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.228008 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-stats-auth\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.240402 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.249074 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aba12881-8bed-4d65-9f58-71b9e848a1cb-proxy-tls\") pod \"machine-config-controller-84d6567774-jsn2h\" (UID: \"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.260840 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.269746 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/75c4b353-ddb5-4709-ab21-94059ac83671-metrics-certs\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.274894 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.275730 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.775689644 +0000 UTC m=+141.397293764 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.279593 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.299528 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.320026 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.327201 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/75c4b353-ddb5-4709-ab21-94059ac83671-service-ca-bundle\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.340913 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.347586 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-webhook-cert\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.348772 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-apiservice-cert\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.361104 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.368129 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/42bd3315-fcd5-4c23-ada1-0e35123d3b7c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-zsx6k\" (UID: \"42bd3315-fcd5-4c23-ada1-0e35123d3b7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.379952 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.380605 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.880578766 +0000 UTC m=+141.502182856 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.383043 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.388718 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.410107 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.415833 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.419897 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.440358 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.460746 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.479151 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.480829 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.481693 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:29.981658146 +0000 UTC m=+141.603262246 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.488528 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/09748673-fe28-490d-84f3-7c0170319531-srv-cert\") pod \"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.500423 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.506663 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d67474c-95af-464f-b92a-4f2bc00dd1fd-config-volume\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.520605 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.540126 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.560440 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.580219 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.583657 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.584025 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.084010893 +0000 UTC m=+141.705614963 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.588974 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8951d83b-fadf-4d39-b1b9-9012f84066c2-serving-cert\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.600344 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.605856 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8951d83b-fadf-4d39-b1b9-9012f84066c2-config\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.619867 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.639042 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.660157 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.670223 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/41d9518a-1700-401e-96bd-3e855533c109-signing-key\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.680259 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.685227 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.685703 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.185676111 +0000 UTC m=+141.807280191 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.685795 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.686541 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.186508525 +0000 UTC m=+141.808112615 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.699906 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.706640 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/41d9518a-1700-401e-96bd-3e855533c109-signing-cabundle\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.720296 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.740392 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.759768 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.763973 4835 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.764002 4835 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.764066 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-config-volume podName:b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1 nodeName:}" failed. 
No retries permitted until 2026-02-02 16:52:30.264046293 +0000 UTC m=+141.885650373 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-config-volume") pod "dns-default-9rdhl" (UID: "b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1") : failed to sync configmap cache: timed out waiting for the condition Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.764093 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-metrics-tls podName:b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1 nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.264080024 +0000 UTC m=+141.885684114 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-metrics-tls") pod "dns-default-9rdhl" (UID: "b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1") : failed to sync secret cache: timed out waiting for the condition Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.765165 4835 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.765308 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-certs podName:73c03460-5e98-4103-9da2-de2cf1c391f5 nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.265253449 +0000 UTC m=+141.886857569 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-certs") pod "machine-config-server-7mhmj" (UID: "73c03460-5e98-4103-9da2-de2cf1c391f5") : failed to sync secret cache: timed out waiting for the condition Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.765844 4835 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.765892 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-node-bootstrap-token podName:73c03460-5e98-4103-9da2-de2cf1c391f5 nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.265883147 +0000 UTC m=+141.887487227 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-node-bootstrap-token") pod "machine-config-server-7mhmj" (UID: "73c03460-5e98-4103-9da2-de2cf1c391f5") : failed to sync secret cache: timed out waiting for the condition Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.770801 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7b9bef11-2ef9-4bad-b548-c86f910ce019-cert\") pod \"ingress-canary-7q9qk\" (UID: \"7b9bef11-2ef9-4bad-b548-c86f910ce019\") " pod="openshift-ingress-canary/ingress-canary-7q9qk" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.779546 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.787453 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.787809 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.287777581 +0000 UTC m=+141.909381701 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.788718 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.789216 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.289196132 +0000 UTC m=+141.910800252 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.799626 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.840930 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxsw5\" (UniqueName: \"kubernetes.io/projected/99f30a12-2016-4dee-a4cc-02699b58d1a6-kube-api-access-cxsw5\") pod \"dns-operator-744455d44c-8wthb\" (UID: \"99f30a12-2016-4dee-a4cc-02699b58d1a6\") " pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.858469 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pknk5\" (UniqueName: \"kubernetes.io/projected/d12f85a7-4683-4ad1-aa7c-0c30b52f976f-kube-api-access-pknk5\") pod \"apiserver-76f77b778f-wq452\" (UID: \"d12f85a7-4683-4ad1-aa7c-0c30b52f976f\") " pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.877946 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drdtc\" (UniqueName: \"kubernetes.io/projected/67a8f4cf-ff9c-48ab-92dd-b2e096ab4192-kube-api-access-drdtc\") pod \"machine-api-operator-5694c8668f-44vfm\" (UID: \"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.890099 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.890319 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.390262032 +0000 UTC m=+142.011866122 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.890502 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.890836 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.390823428 +0000 UTC m=+142.012427508 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.892838 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bwcc\" (UniqueName: \"kubernetes.io/projected/c66e4654-4ffd-4a39-8e1e-34d979249c94-kube-api-access-5bwcc\") pod \"machine-approver-56656f9798-c4wcz\" (UID: \"c66e4654-4ffd-4a39-8e1e-34d979249c94\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.913453 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jfvj\" (UniqueName: \"kubernetes.io/projected/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-kube-api-access-7jfvj\") pod \"controller-manager-879f6c89f-87z74\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.919266 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.933898 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmpps\" (UniqueName: \"kubernetes.io/projected/e5478630-cfe1-49d8-b597-7c2c9afec10f-kube-api-access-xmpps\") pod \"authentication-operator-69f744f599-vk9xf\" (UID: \"e5478630-cfe1-49d8-b597-7c2c9afec10f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.954525 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wd9q\" (UniqueName: \"kubernetes.io/projected/a130c6a7-c518-4407-82b0-eb291617a482-kube-api-access-8wd9q\") pod \"openshift-config-operator-7777fb866f-2w4fx\" (UID: \"a130c6a7-c518-4407-82b0-eb291617a482\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.975103 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgds4\" (UniqueName: \"kubernetes.io/projected/dd17476b-94eb-4998-8477-50763c1d1222-kube-api-access-bgds4\") pod \"etcd-operator-b45778765-kk9sb\" (UID: \"dd17476b-94eb-4998-8477-50763c1d1222\") " pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.992410 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.992593 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.492565758 +0000 UTC m=+142.114169838 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:29 crc kubenswrapper[4835]: I0202 16:52:29.993054 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:29 crc kubenswrapper[4835]: E0202 16:52:29.993476 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.493460864 +0000 UTC m=+142.115064954 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.001463 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8386d926-f309-4b23-aa26-1bd47506682f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8sjp7\" (UID: \"8386d926-f309-4b23-aa26-1bd47506682f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.017209 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqjgr\" (UniqueName: \"kubernetes.io/projected/636b53c1-5764-44a8-99e4-e0b461c55943-kube-api-access-fqjgr\") pod \"console-operator-58897d9998-dg8r2\" (UID: \"636b53c1-5764-44a8-99e4-e0b461c55943\") " pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.017567 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.020157 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.039560 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.048241 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.059794 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.072133 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.078746 4835 request.go:700] Waited for 1.885178851s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.081038 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.089680 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.099414 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.100051 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.600036505 +0000 UTC m=+142.221640585 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.101641 4835 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.119899 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.141013 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.159677 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.174413 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.181547 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.186378 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.200803 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.208380 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.208921 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.708902854 +0000 UTC m=+142.330506934 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.218129 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8wthb"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.229396 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.236026 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wq452"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.243009 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-bound-sa-token\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.243421 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:30 crc kubenswrapper[4835]: W0202 16:52:30.247101 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99f30a12_2016_4dee_a4cc_02699b58d1a6.slice/crio-529b586ed5d9e6f24267bcaae36e7a0d4e7331ae42c8fcdde1a16971f9d04dc8 WatchSource:0}: Error finding container 529b586ed5d9e6f24267bcaae36e7a0d4e7331ae42c8fcdde1a16971f9d04dc8: Status 404 returned error can't find the container with id 529b586ed5d9e6f24267bcaae36e7a0d4e7331ae42c8fcdde1a16971f9d04dc8 Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.259197 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbt6z\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-kube-api-access-pbt6z\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.268474 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-kk9sb"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.273256 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msbtj\" (UniqueName: \"kubernetes.io/projected/b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9-kube-api-access-msbtj\") pod \"apiserver-7bbb656c7d-s2f6g\" (UID: \"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.304541 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msbgc\" (UniqueName: \"kubernetes.io/projected/2affec3b-57ec-4308-8c4f-6c5b6f94e541-kube-api-access-msbgc\") pod \"openshift-apiserver-operator-796bbdcf4f-6wx7c\" (UID: \"2affec3b-57ec-4308-8c4f-6c5b6f94e541\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.310306 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.310501 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.810470638 +0000 UTC m=+142.432074718 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.310913 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-config-volume\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.310951 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-metrics-tls\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.311080 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.311177 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-node-bootstrap-token\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.311306 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-certs\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.311679 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.811660583 +0000 UTC m=+142.433264673 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.311706 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-config-volume\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.316540 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-certs\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.317421 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-metrics-tls\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.319071 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hcjb\" (UniqueName: \"kubernetes.io/projected/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-kube-api-access-2hcjb\") pod \"route-controller-manager-6576b87f9c-qqjn4\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.320048 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/73c03460-5e98-4103-9da2-de2cf1c391f5-node-bootstrap-token\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.335186 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tvbq\" (UniqueName: \"kubernetes.io/projected/ea193cb5-8e86-4628-a115-16a3987f4eaf-kube-api-access-9tvbq\") pod \"oauth-openshift-558db77b4-2n9fx\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.350182 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-44vfm"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.355805 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e69421a3-e42a-4e87-a164-486a774179ff-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.363845 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-87z74"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.381081 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.386715 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlg74\" (UniqueName: \"kubernetes.io/projected/42bd3315-fcd5-4c23-ada1-0e35123d3b7c-kube-api-access-dlg74\") pod \"package-server-manager-789f6589d5-zsx6k\" (UID: \"42bd3315-fcd5-4c23-ada1-0e35123d3b7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.397882 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw2qb\" (UniqueName: \"kubernetes.io/projected/da2cb6be-96b3-4ac1-81ec-ac57cdf853c0-kube-api-access-tw2qb\") pod \"machine-config-operator-74547568cd-smljg\" (UID: \"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.412665 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.413647 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:30.913631899 +0000 UTC m=+142.535235979 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.416236 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32591974-ca1a-4284-a32d-eb3eb856a9d5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-67bm4\" (UID: \"32591974-ca1a-4284-a32d-eb3eb856a9d5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.438889 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj959\" (UniqueName: \"kubernetes.io/projected/6b48c81d-0f0b-497d-83c4-8f495a859829-kube-api-access-tj959\") pod \"migrator-59844c95c7-wvnqh\" (UID: \"6b48c81d-0f0b-497d-83c4-8f495a859829\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.443695 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.453652 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.453827 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkxz5\" (UniqueName: \"kubernetes.io/projected/2670e3af-3faf-4aa2-8674-ad7b94955ef0-kube-api-access-wkxz5\") pod \"downloads-7954f5f757-tqhv4\" (UID: \"2670e3af-3faf-4aa2-8674-ad7b94955ef0\") " pod="openshift-console/downloads-7954f5f757-tqhv4" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.463710 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.471651 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vk9xf"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.479437 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w725d\" (UniqueName: \"kubernetes.io/projected/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-kube-api-access-w725d\") pod \"console-f9d7485db-xpn8c\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.504919 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrrrj\" (UniqueName: \"kubernetes.io/projected/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-kube-api-access-qrrrj\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.514326 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.514963 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.516867 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/076fa5b0-3a9d-45dc-9a52-b8f986cde3e0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-52nq4\" (UID: \"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.516876 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.016860662 +0000 UTC m=+142.638464742 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.517697 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dg8r2"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.535245 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84mrm\" (UniqueName: \"kubernetes.io/projected/7b9bef11-2ef9-4bad-b548-c86f910ce019-kube-api-access-84mrm\") pod \"ingress-canary-7q9qk\" (UID: \"7b9bef11-2ef9-4bad-b548-c86f910ce019\") " pod="openshift-ingress-canary/ingress-canary-7q9qk" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.544093 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.558948 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsfz2\" (UniqueName: \"kubernetes.io/projected/acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f-kube-api-access-nsfz2\") pod \"control-plane-machine-set-operator-78cbb6b69f-txgvr\" (UID: \"acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.566667 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7"] Feb 02 16:52:30 crc kubenswrapper[4835]: W0202 16:52:30.575499 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod636b53c1_5764_44a8_99e4_e0b461c55943.slice/crio-11d62259815108b36c1bdb241216391158fcb9486f1904421b64474921a4566f WatchSource:0}: Error finding container 11d62259815108b36c1bdb241216391158fcb9486f1904421b64474921a4566f: Status 404 returned error can't find the container with id 11d62259815108b36c1bdb241216391158fcb9486f1904421b64474921a4566f Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.577718 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l48s9\" (UniqueName: \"kubernetes.io/projected/09748673-fe28-490d-84f3-7c0170319531-kube-api-access-l48s9\") pod \"catalog-operator-68c6474976-s4vdz\" (UID: \"09748673-fe28-490d-84f3-7c0170319531\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.582403 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.595325 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdcj9\" (UniqueName: \"kubernetes.io/projected/b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1-kube-api-access-vdcj9\") pod \"dns-default-9rdhl\" (UID: \"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1\") " pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:30 crc kubenswrapper[4835]: W0202 16:52:30.595566 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8386d926_f309_4b23_aa26_1bd47506682f.slice/crio-e785d374277e8df477c9f1e9b4ebaa764c3ac9c14316d281e4fd7827f2374f2d WatchSource:0}: Error finding container e785d374277e8df477c9f1e9b4ebaa764c3ac9c14316d281e4fd7827f2374f2d: Status 404 returned error can't find the container with id e785d374277e8df477c9f1e9b4ebaa764c3ac9c14316d281e4fd7827f2374f2d Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.607415 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.616028 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5bcz\" (UniqueName: \"kubernetes.io/projected/9dfad441-aeb8-4d7d-962a-a2b0d352dac0-kube-api-access-n5bcz\") pod \"packageserver-d55dfcdfc-shcv8\" (UID: \"9dfad441-aeb8-4d7d-962a-a2b0d352dac0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.616558 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.616848 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.116822739 +0000 UTC m=+142.738426819 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.624393 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.634761 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.635634 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hprv6\" (UniqueName: \"kubernetes.io/projected/0d67474c-95af-464f-b92a-4f2bc00dd1fd-kube-api-access-hprv6\") pod \"collect-profiles-29500845-cs788\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.645050 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.654084 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.663560 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-szc4r\" (UID: \"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.665976 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" event={"ID":"636b53c1-5764-44a8-99e4-e0b461c55943","Type":"ContainerStarted","Data":"11d62259815108b36c1bdb241216391158fcb9486f1904421b64474921a4566f"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.670980 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-tqhv4" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.675834 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" event={"ID":"c66e4654-4ffd-4a39-8e1e-34d979249c94","Type":"ContainerStarted","Data":"4a56fa43bb351abf1ea1828f204699bbe994b9d1f9afedc6a6579e61e3907649"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.681341 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" event={"ID":"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c","Type":"ContainerStarted","Data":"940e7a02adc0a26daf7292a2e7eb95fb8b282e3c976084f3d5483b3cbb0df26d"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.682701 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" event={"ID":"dd17476b-94eb-4998-8477-50763c1d1222","Type":"ContainerStarted","Data":"2c671a708313d1809d8bc76246bb933e52fce1f9465171e61186fe40b7aebcf9"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.683753 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" event={"ID":"8386d926-f309-4b23-aa26-1bd47506682f","Type":"ContainerStarted","Data":"e785d374277e8df477c9f1e9b4ebaa764c3ac9c14316d281e4fd7827f2374f2d"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.684369 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" event={"ID":"99f30a12-2016-4dee-a4cc-02699b58d1a6","Type":"ContainerStarted","Data":"529b586ed5d9e6f24267bcaae36e7a0d4e7331ae42c8fcdde1a16971f9d04dc8"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.685399 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" event={"ID":"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192","Type":"ContainerStarted","Data":"0458aa78b48c0afae4c7208426e8ef4ff84f6aae8c2424b1f030f09742df935a"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.687626 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wq452" event={"ID":"d12f85a7-4683-4ad1-aa7c-0c30b52f976f","Type":"ContainerStarted","Data":"6975748d92d361f73e30785a31b0e785512437a34d276b731db765020a828611"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.691616 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" event={"ID":"e5478630-cfe1-49d8-b597-7c2c9afec10f","Type":"ContainerStarted","Data":"fddc34e6814b6f8ea0df98d3585e57e0f2c14d1f27963c078874c3ddcaf2cfd6"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.694413 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg8gs\" (UniqueName: \"kubernetes.io/projected/aba12881-8bed-4d65-9f58-71b9e848a1cb-kube-api-access-jg8gs\") pod \"machine-config-controller-84d6567774-jsn2h\" (UID: \"aba12881-8bed-4d65-9f58-71b9e848a1cb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.695219 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" 
event={"ID":"a130c6a7-c518-4407-82b0-eb291617a482","Type":"ContainerStarted","Data":"bcdec3a05c35431fdc7186b5ce176796e4d26027fbafe5061ceda5de3ac2108d"} Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.709609 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.718089 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.718445 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.218433965 +0000 UTC m=+142.840038035 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.728203 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vd9f\" (UniqueName: \"kubernetes.io/projected/9e36008e-f103-4e5a-9543-6d3ef330d446-kube-api-access-8vd9f\") pod \"csi-hostpathplugin-wq59n\" (UID: \"9e36008e-f103-4e5a-9543-6d3ef330d446\") " pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.728729 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw48q\" (UniqueName: \"kubernetes.io/projected/6b160815-f795-4fb0-8f99-f6b3086709a5-kube-api-access-dw48q\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9gvn\" (UID: \"6b160815-f795-4fb0-8f99-f6b3086709a5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.734522 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbz4x\" (UniqueName: \"kubernetes.io/projected/5213a65c-70c8-4bd5-8f3d-071943782bbf-kube-api-access-kbz4x\") pod \"openshift-controller-manager-operator-756b6f6bc6-76pc7\" (UID: \"5213a65c-70c8-4bd5-8f3d-071943782bbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.736352 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.746416 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.754785 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk4cn\" (UniqueName: \"kubernetes.io/projected/8951d83b-fadf-4d39-b1b9-9012f84066c2-kube-api-access-jk4cn\") pod \"service-ca-operator-777779d784-2pghx\" (UID: \"8951d83b-fadf-4d39-b1b9-9012f84066c2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.763438 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.774526 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgt2v\" (UniqueName: \"kubernetes.io/projected/e69421a3-e42a-4e87-a164-486a774179ff-kube-api-access-bgt2v\") pod \"ingress-operator-5b745b69d9-gdfkf\" (UID: \"e69421a3-e42a-4e87-a164-486a774179ff\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.777040 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.785074 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.792604 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp7nq\" (UniqueName: \"kubernetes.io/projected/41d9518a-1700-401e-96bd-3e855533c109-kube-api-access-tp7nq\") pod \"service-ca-9c57cc56f-85m82\" (UID: \"41d9518a-1700-401e-96bd-3e855533c109\") " pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.800148 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-7q9qk" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.821217 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.821919 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.321893645 +0000 UTC m=+142.943497725 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.822164 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd6cd\" (UniqueName: \"kubernetes.io/projected/dd8b41f5-efda-47ad-83ef-891bcf11fc70-kube-api-access-dd6cd\") pod \"olm-operator-6b444d44fb-cs4g8\" (UID: \"dd8b41f5-efda-47ad-83ef-891bcf11fc70\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.848921 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj6ls\" (UniqueName: \"kubernetes.io/projected/2b288d3c-3c0e-4290-819a-d5c1e7eedcf0-kube-api-access-dj6ls\") pod \"multus-admission-controller-857f4d67dd-cs52k\" (UID: \"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.851084 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wq59n" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.853220 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-2n9fx"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.858626 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.859267 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.859766 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qtsx\" (UniqueName: \"kubernetes.io/projected/73c03460-5e98-4103-9da2-de2cf1c391f5-kube-api-access-8qtsx\") pod \"machine-config-server-7mhmj\" (UID: \"73c03460-5e98-4103-9da2-de2cf1c391f5\") " pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.876190 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd6r8\" (UniqueName: \"kubernetes.io/projected/20b9b4db-66e7-4732-986a-5b766e8bc6fb-kube-api-access-sd6r8\") pod \"cluster-samples-operator-665b6dd947-nwzzt\" (UID: \"20b9b4db-66e7-4732-986a-5b766e8bc6fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.889414 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.893870 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpcpc\" (UniqueName: \"kubernetes.io/projected/1bfd176d-b2cc-45f8-a80d-61e391f25163-kube-api-access-kpcpc\") pod \"marketplace-operator-79b997595-k5vz8\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.894548 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.899126 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4"] Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.901762 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.914384 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7r96\" (UniqueName: \"kubernetes.io/projected/75c4b353-ddb5-4709-ab21-94059ac83671-kube-api-access-g7r96\") pod \"router-default-5444994796-bbqwj\" (UID: \"75c4b353-ddb5-4709-ab21-94059ac83671\") " pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.927522 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:30 crc kubenswrapper[4835]: E0202 16:52:30.927852 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.427835528 +0000 UTC m=+143.049439608 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.929857 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.963525 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.988964 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" Feb 02 16:52:30 crc kubenswrapper[4835]: W0202 16:52:30.991960 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6f3dc6d_8984_420d_81cd_e2a2db1d4bd9.slice/crio-dd3103027a42ff8e356648f027add4b3a7f96c39ad480139c68b3784d0101091 WatchSource:0}: Error finding container dd3103027a42ff8e356648f027add4b3a7f96c39ad480139c68b3784d0101091: Status 404 returned error can't find the container with id dd3103027a42ff8e356648f027add4b3a7f96c39ad480139c68b3784d0101091 Feb 02 16:52:30 crc kubenswrapper[4835]: I0202 16:52:30.997422 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:31 crc kubenswrapper[4835]: W0202 16:52:31.003457 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d31701b_ac62_4bdc_91d5_d9f411f6cf23.slice/crio-476c43b7d8627952d3b500f6daced3f7cb628f3f0e90e0c21a7e6a79bdc0ffd1 WatchSource:0}: Error finding container 476c43b7d8627952d3b500f6daced3f7cb628f3f0e90e0c21a7e6a79bdc0ffd1: Status 404 returned error can't find the container with id 476c43b7d8627952d3b500f6daced3f7cb628f3f0e90e0c21a7e6a79bdc0ffd1 Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.020580 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xpn8c"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.021644 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.030899 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.031087 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.53106587 +0000 UTC m=+143.152669960 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.031142 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.031488 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.531476893 +0000 UTC m=+143.153080973 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.062816 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.070913 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.092974 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-85m82" Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.108311 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-7mhmj" Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.134004 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.134424 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.634406997 +0000 UTC m=+143.256011077 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.146920 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.237622 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.238123 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.738104284 +0000 UTC m=+143.359708364 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.253152 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.338659 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.338788 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.838772292 +0000 UTC m=+143.460376372 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.339103 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.339365 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.839358759 +0000 UTC m=+143.460962839 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.439986 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.440118 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.940094488 +0000 UTC m=+143.561698568 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.440262 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.440543 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:31.940532691 +0000 UTC m=+143.562136771 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: W0202 16:52:31.485163 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42bd3315_fcd5_4c23_ada1_0e35123d3b7c.slice/crio-2ff08d7e74dec8d30c5153745eed6d047f59dedd722df650b84a0d38f8930b35 WatchSource:0}: Error finding container 2ff08d7e74dec8d30c5153745eed6d047f59dedd722df650b84a0d38f8930b35: Status 404 returned error can't find the container with id 2ff08d7e74dec8d30c5153745eed6d047f59dedd722df650b84a0d38f8930b35 Feb 02 16:52:31 crc kubenswrapper[4835]: W0202 16:52:31.497589 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b48c81d_0f0b_497d_83c4_8f495a859829.slice/crio-c8ec57b4655caafc95260e2a54abc3500e6e872b5b1bf21f8f7a22e013432fb5 WatchSource:0}: Error finding container c8ec57b4655caafc95260e2a54abc3500e6e872b5b1bf21f8f7a22e013432fb5: Status 404 returned error can't find the container with id c8ec57b4655caafc95260e2a54abc3500e6e872b5b1bf21f8f7a22e013432fb5 Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.541722 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.541858 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-02 16:52:32.041839337 +0000 UTC m=+143.663443417 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.542073 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.542518 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.042499696 +0000 UTC m=+143.664103776 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.567656 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.588894 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2pghx"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.644480 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.649483 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.149439659 +0000 UTC m=+143.771043739 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.658410 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.679530 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.730924 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-tqhv4"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.756174 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.756574 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.256558396 +0000 UTC m=+143.878162486 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.764403 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.774496 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-smljg"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.783313 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.793734 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wq59n"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.796859 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" event={"ID":"2affec3b-57ec-4308-8c4f-6c5b6f94e541","Type":"ContainerStarted","Data":"1113b7566b614a640d4f71be346d8d0a7fd65a1860bc6dd916a178977aedafcb"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.798677 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz"] Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.803490 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" event={"ID":"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c","Type":"ContainerStarted","Data":"c46e4fbf2fef9e66eea766dcebc625006248cf6c3424ecebbf3f50aad1b9f40e"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.804190 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.813435 4835 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-87z74 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.813476 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.814633 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" event={"ID":"dd17476b-94eb-4998-8477-50763c1d1222","Type":"ContainerStarted","Data":"fe4bc9615e61f0e15dc4953e261e3bccbd4b1f808a010affaf10215a2c1c7e79"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.829772 4835 generic.go:334] "Generic (PLEG): container finished" podID="d12f85a7-4683-4ad1-aa7c-0c30b52f976f" 
containerID="aa35b136bac91c9a9ce40fc6da09dce2b23d40031f896bfed85215f9f6125b4c" exitCode=0 Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.829865 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wq452" event={"ID":"d12f85a7-4683-4ad1-aa7c-0c30b52f976f","Type":"ContainerDied","Data":"aa35b136bac91c9a9ce40fc6da09dce2b23d40031f896bfed85215f9f6125b4c"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.833085 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" event={"ID":"aba12881-8bed-4d65-9f58-71b9e848a1cb","Type":"ContainerStarted","Data":"a15f605488fecaf25bc8f25a1b52ada0ba1f5b49e5edefdd79989056a3282393"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.834754 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" event={"ID":"42bd3315-fcd5-4c23-ada1-0e35123d3b7c","Type":"ContainerStarted","Data":"2ff08d7e74dec8d30c5153745eed6d047f59dedd722df650b84a0d38f8930b35"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.835888 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xpn8c" event={"ID":"aad2f2e8-6800-4238-a0ab-ee3304bad4c1","Type":"ContainerStarted","Data":"ee39f44671c742e7633f8f609bfb27f3126ad58c2a3c1a76b3cb45d562ef9031"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.836802 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" event={"ID":"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0","Type":"ContainerStarted","Data":"3f6d00c7ca22543417839575160f45c7d5dfdb057334022572ea9269f87c06f0"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.840179 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" event={"ID":"6b48c81d-0f0b-497d-83c4-8f495a859829","Type":"ContainerStarted","Data":"c8ec57b4655caafc95260e2a54abc3500e6e872b5b1bf21f8f7a22e013432fb5"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.843579 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" event={"ID":"e5478630-cfe1-49d8-b597-7c2c9afec10f","Type":"ContainerStarted","Data":"ef0069d7244cffaf9a3fa08e511046221cd1ec35beea547c04c52a213fc08fe5"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.851102 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" event={"ID":"5d31701b-ac62-4bdc-91d5-d9f411f6cf23","Type":"ContainerStarted","Data":"476c43b7d8627952d3b500f6daced3f7cb628f3f0e90e0c21a7e6a79bdc0ffd1"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.856969 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.857293 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-02 16:52:32.357247494 +0000 UTC m=+143.978851574 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.857353 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.858028 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.357995166 +0000 UTC m=+143.979599236 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.865178 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" event={"ID":"c66e4654-4ffd-4a39-8e1e-34d979249c94","Type":"ContainerStarted","Data":"82996d08e971f8977a684537280217529b60cd58cbb18c601f0b5cb32993eda7"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.866760 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" event={"ID":"99f30a12-2016-4dee-a4cc-02699b58d1a6","Type":"ContainerStarted","Data":"59b6921cf6330a8c6a43c27aa931e4e92450f3210ba699c45ba022c9a645eba2"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.867733 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" event={"ID":"ea193cb5-8e86-4628-a115-16a3987f4eaf","Type":"ContainerStarted","Data":"879d2e0ee4fdf155d1bf2c61155063211432fee08da8c7c75fc0c6fac4df9983"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.871970 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" event={"ID":"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192","Type":"ContainerStarted","Data":"f6a44cdb452767da1ec4805b60c489c34a0ab4eb7e21755ea0362e0829c97fb1"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.902675 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" event={"ID":"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9","Type":"ContainerStarted","Data":"dd3103027a42ff8e356648f027add4b3a7f96c39ad480139c68b3784d0101091"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 
16:52:31.904949 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" event={"ID":"636b53c1-5764-44a8-99e4-e0b461c55943","Type":"ContainerStarted","Data":"d027c5c693636b6fb02930ca6c17af4e6c112cc10fc2c8ca2f930b2b69e92570"} Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.905311 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.922606 4835 patch_prober.go:28] interesting pod/console-operator-58897d9998-dg8r2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.922685 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" podUID="636b53c1-5764-44a8-99e4-e0b461c55943" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Feb 02 16:52:31 crc kubenswrapper[4835]: I0202 16:52:31.958377 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:31 crc kubenswrapper[4835]: E0202 16:52:31.959952 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.459929061 +0000 UTC m=+144.081533141 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.052231 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.061320 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.062487 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.562465394 +0000 UTC m=+144.184069664 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.165068 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.165489 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.665470321 +0000 UTC m=+144.287074411 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.192006 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cs52k"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.242752 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.266265 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.266610 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.766598132 +0000 UTC m=+144.388202212 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.294355 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5vz8"] Feb 02 16:52:32 crc kubenswrapper[4835]: W0202 16:52:32.303543 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5213a65c_70c8_4bd5_8f3d_071943782bbf.slice/crio-a083ff8fe55a8e895f58dc37bc18b25c67319d84b0e71702025cd8e8aeeec1a0 WatchSource:0}: Error finding container a083ff8fe55a8e895f58dc37bc18b25c67319d84b0e71702025cd8e8aeeec1a0: Status 404 returned error can't find the container with id a083ff8fe55a8e895f58dc37bc18b25c67319d84b0e71702025cd8e8aeeec1a0 Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.326796 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-7q9qk"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.327164 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.367529 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.367826 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.867807256 +0000 UTC m=+144.489411336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.368294 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.368560 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-02-02 16:52:32.868552288 +0000 UTC m=+144.490156368 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.404683 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-85m82"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.415135 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.421363 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.424340 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt"] Feb 02 16:52:32 crc kubenswrapper[4835]: W0202 16:52:32.437373 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1bfd176d_b2cc_45f8_a80d_61e391f25163.slice/crio-a11287ec32510918a50401159353db8e067ceb0f364c415e9507d506ad476c42 WatchSource:0}: Error finding container a11287ec32510918a50401159353db8e067ceb0f364c415e9507d506ad476c42: Status 404 returned error can't find the container with id a11287ec32510918a50401159353db8e067ceb0f364c415e9507d506ad476c42 Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.472290 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.472749 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:32.972730358 +0000 UTC m=+144.594334438 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.497323 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9rdhl"] Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.583281 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.583601 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.083591306 +0000 UTC m=+144.705195386 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: W0202 16:52:32.621366 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b160815_f795_4fb0_8f99_f6b3086709a5.slice/crio-4a60e5baae3f807262627bc88e921076163c1ee2479b56740385021742ea3e1e WatchSource:0}: Error finding container 4a60e5baae3f807262627bc88e921076163c1ee2479b56740385021742ea3e1e: Status 404 returned error can't find the container with id 4a60e5baae3f807262627bc88e921076163c1ee2479b56740385021742ea3e1e Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.661191 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-kk9sb" podStartSLOduration=123.661175305 podStartE2EDuration="2m3.661175305s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:32.659661211 +0000 UTC m=+144.281265281" watchObservedRunningTime="2026-02-02 16:52:32.661175305 +0000 UTC m=+144.282779385" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.686005 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.686239 4835 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.186222351 +0000 UTC m=+144.807826431 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.686304 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.686604 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.186596402 +0000 UTC m=+144.808200472 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.744647 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" podStartSLOduration=123.744624447 podStartE2EDuration="2m3.744624447s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:32.725684981 +0000 UTC m=+144.347289061" watchObservedRunningTime="2026-02-02 16:52:32.744624447 +0000 UTC m=+144.366228527" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.745689 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-vk9xf" podStartSLOduration=123.745680658 podStartE2EDuration="2m3.745680658s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:32.742289939 +0000 UTC m=+144.363894019" watchObservedRunningTime="2026-02-02 16:52:32.745680658 +0000 UTC m=+144.367284738" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.770942 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" podStartSLOduration=123.770905179 podStartE2EDuration="2m3.770905179s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:32.76956246 +0000 UTC m=+144.391166540" watchObservedRunningTime="2026-02-02 16:52:32.770905179 +0000 UTC m=+144.392509259" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.787134 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.788113 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.288095744 +0000 UTC m=+144.909699824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.888390 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.889180 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.389128533 +0000 UTC m=+145.010732613 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.916510 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" event={"ID":"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0","Type":"ContainerStarted","Data":"ee51d0df8446e87ea807fdee3e8bd0d3f50af66ee79c3a6ec5fa1d346d2d13b9"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.917140 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" event={"ID":"20b9b4db-66e7-4732-986a-5b766e8bc6fb","Type":"ContainerStarted","Data":"c8fbfd4e2a9bc09e96b9b286b7225a36af9596f68746ac4df5466acbf4873201"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.918330 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9rdhl" event={"ID":"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1","Type":"ContainerStarted","Data":"f0e6d2cade32feb2efc7dc5e015fac3042b31fe69050c3764595a5e21c72c038"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.920622 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" event={"ID":"32591974-ca1a-4284-a32d-eb3eb856a9d5","Type":"ContainerStarted","Data":"7c33dbe66e1793e2ffc141d45640724fa2305317b8f602b624e705cb9a03b720"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.921465 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" event={"ID":"6b160815-f795-4fb0-8f99-f6b3086709a5","Type":"ContainerStarted","Data":"4a60e5baae3f807262627bc88e921076163c1ee2479b56740385021742ea3e1e"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.923076 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" event={"ID":"67a8f4cf-ff9c-48ab-92dd-b2e096ab4192","Type":"ContainerStarted","Data":"8692aaa975fc24c3f4a92ff17880844af15aa42f67f6dfd352a3611e1bdc31c6"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.925263 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" event={"ID":"5d31701b-ac62-4bdc-91d5-d9f411f6cf23","Type":"ContainerStarted","Data":"5aa6bb928922b9f0ad1ad1291055816bd9a9238803853e50a2d3a00ffd4b0dc1"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.926012 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.928386 4835 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-qqjn4 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.928427 4835 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" event={"ID":"09748673-fe28-490d-84f3-7c0170319531","Type":"ContainerStarted","Data":"b95b42263b27eec110a9d35b7c7ba4548b8f85509a77e6a04cec0360252cd46a"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.928429 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.933001 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" event={"ID":"ea193cb5-8e86-4628-a115-16a3987f4eaf","Type":"ContainerStarted","Data":"6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.933440 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.946883 4835 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-2n9fx container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.31:6443/healthz\": dial tcp 10.217.0.31:6443: connect: connection refused" start-of-body= Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.946949 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.31:6443/healthz\": dial tcp 10.217.0.31:6443: connect: connection refused" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.946867 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-44vfm" podStartSLOduration=122.946850049 podStartE2EDuration="2m2.946850049s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:32.943047487 +0000 UTC m=+144.564651567" watchObservedRunningTime="2026-02-02 16:52:32.946850049 +0000 UTC m=+144.568454129" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.950614 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" event={"ID":"2affec3b-57ec-4308-8c4f-6c5b6f94e541","Type":"ContainerStarted","Data":"0a142aecee44e377382792c4a3cde272d3377fc1d2ee16a85457119afbe37121"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.954347 4835 generic.go:334] "Generic (PLEG): container finished" podID="b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9" containerID="bb8ac28d97d204090db827543e61a0ca57a9e9e3ca39e86d79822ab1cf7f825a" exitCode=0 Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.954529 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" event={"ID":"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9","Type":"ContainerDied","Data":"bb8ac28d97d204090db827543e61a0ca57a9e9e3ca39e86d79822ab1cf7f825a"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.955972 
4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" event={"ID":"acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f","Type":"ContainerStarted","Data":"15622900b73b8fbcda85cb9ff8cb5bb3cd96740a2cb357aa39e40318503b7b16"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.958579 4835 generic.go:334] "Generic (PLEG): container finished" podID="a130c6a7-c518-4407-82b0-eb291617a482" containerID="36ed0d05e3e29061b313d702564e58de70bc60f52e2b766147451ce11712b359" exitCode=0 Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.958921 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" event={"ID":"a130c6a7-c518-4407-82b0-eb291617a482","Type":"ContainerDied","Data":"36ed0d05e3e29061b313d702564e58de70bc60f52e2b766147451ce11712b359"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.963694 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" event={"ID":"42bd3315-fcd5-4c23-ada1-0e35123d3b7c","Type":"ContainerStarted","Data":"09c21250c5ff18a7558a2faea273e22e93bc457d9cc02fa854515e01d5820069"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.965308 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" podStartSLOduration=123.96525867 podStartE2EDuration="2m3.96525867s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:32.962936022 +0000 UTC m=+144.584540102" watchObservedRunningTime="2026-02-02 16:52:32.96525867 +0000 UTC m=+144.586862750" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.967549 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" event={"ID":"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0","Type":"ContainerStarted","Data":"a74c40215f1c182f95285b5bdc635895aa501fe024fdf9d2be716795e995cd07"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.968441 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" event={"ID":"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1","Type":"ContainerStarted","Data":"9a718c9f0933a99032a1fe559a555bade62b627b7af1da35461abccd7632b7f3"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.974446 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tqhv4" event={"ID":"2670e3af-3faf-4aa2-8674-ad7b94955ef0","Type":"ContainerStarted","Data":"2957deb988539cede2f1768faaff687b488dda617980234776aa3103183befb6"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.975880 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" event={"ID":"5213a65c-70c8-4bd5-8f3d-071943782bbf","Type":"ContainerStarted","Data":"a083ff8fe55a8e895f58dc37bc18b25c67319d84b0e71702025cd8e8aeeec1a0"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.983367 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" 
event={"ID":"8386d926-f309-4b23-aa26-1bd47506682f","Type":"ContainerStarted","Data":"6ad9d8ceba35f335065cadff93ab4be2be665939c283305ef226890915f35cfd"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.986634 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" event={"ID":"9dfad441-aeb8-4d7d-962a-a2b0d352dac0","Type":"ContainerStarted","Data":"8b3c610c8042c965ca56c8e966ab4e23b9887dd44382762ad92ff0c621c91dc3"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.987528 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wq59n" event={"ID":"9e36008e-f103-4e5a-9543-6d3ef330d446","Type":"ContainerStarted","Data":"1006deb2bf26bd7b7e044129d983554a220ffad1caecc589a47988f255f3c8a9"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.994839 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" event={"ID":"6b48c81d-0f0b-497d-83c4-8f495a859829","Type":"ContainerStarted","Data":"b8db962e46dabf5d6ae1d0969a67344515b38a94c80b87ffb6b19f4172ca2d4e"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.996835 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-7q9qk" event={"ID":"7b9bef11-2ef9-4bad-b548-c86f910ce019","Type":"ContainerStarted","Data":"1cd829b992ccc6e5cffa41ce5591cabad432f871e547c56f14dba062851d1ca1"} Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.998827 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.999236 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.499216198 +0000 UTC m=+145.120820278 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.999333 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.999380 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" event={"ID":"0d67474c-95af-464f-b92a-4f2bc00dd1fd","Type":"ContainerStarted","Data":"09e761f66cb5798c0fc433263c3aa401c43d5e36ca7e7dfadf3ee73cfb193ea8"} Feb 02 16:52:32 crc kubenswrapper[4835]: E0202 16:52:32.999691 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.499679961 +0000 UTC m=+145.121284041 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:32 crc kubenswrapper[4835]: I0202 16:52:32.999757 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" podStartSLOduration=122.999745813 podStartE2EDuration="2m2.999745813s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:32.981907879 +0000 UTC m=+144.603511959" watchObservedRunningTime="2026-02-02 16:52:32.999745813 +0000 UTC m=+144.621349893" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.000436 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6wx7c" podStartSLOduration=124.000429123 podStartE2EDuration="2m4.000429123s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:32.998781465 +0000 UTC m=+144.620385565" watchObservedRunningTime="2026-02-02 16:52:33.000429123 +0000 UTC m=+144.622033213" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.004825 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-bbqwj" 
event={"ID":"75c4b353-ddb5-4709-ab21-94059ac83671","Type":"ContainerStarted","Data":"58726c0d176accd8da72c98ecde22d91c5dfe584bb8e8650ae720a660359b3d8"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.004895 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-bbqwj" event={"ID":"75c4b353-ddb5-4709-ab21-94059ac83671","Type":"ContainerStarted","Data":"c2cb5c62932ee40289e5078be3bee0557805e9fa80028048e95553d7d5cff31a"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.007232 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xpn8c" event={"ID":"aad2f2e8-6800-4238-a0ab-ee3304bad4c1","Type":"ContainerStarted","Data":"44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.013744 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" event={"ID":"8951d83b-fadf-4d39-b1b9-9012f84066c2","Type":"ContainerStarted","Data":"fac7fab170b1fd60c6e22a4a95d6993c03944e3c0087bba795f0f72806101283"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.013800 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" event={"ID":"8951d83b-fadf-4d39-b1b9-9012f84066c2","Type":"ContainerStarted","Data":"093f80d171856b1c4464634e0bf873bdc25e389f08a177a2d50a6eb1739beef8"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.022515 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.022933 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-7mhmj" event={"ID":"73c03460-5e98-4103-9da2-de2cf1c391f5","Type":"ContainerStarted","Data":"56696bf2604cc98336bc4e7f496b93f7be92c7e8ee0ad01453fac794ee8e6e2c"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.023740 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.023782 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.030836 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-85m82" event={"ID":"41d9518a-1700-401e-96bd-3e855533c109","Type":"ContainerStarted","Data":"dc53ee571076b243acde469acacafb73f6c3578b46fa0602a9dd0d8b00f034b3"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.042120 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" event={"ID":"e69421a3-e42a-4e87-a164-486a774179ff","Type":"ContainerStarted","Data":"9464aa30f6bc54d77556e8c58548adbdff9a8b9dde303fa66d9a0fbac4708f51"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.043882 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" event={"ID":"1bfd176d-b2cc-45f8-a80d-61e391f25163","Type":"ContainerStarted","Data":"a11287ec32510918a50401159353db8e067ceb0f364c415e9507d506ad476c42"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.045754 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" event={"ID":"076fa5b0-3a9d-45dc-9a52-b8f986cde3e0","Type":"ContainerStarted","Data":"463127d24a1227e621fb1ce4a3027901345b39cd4591e608a91581e0c84084d1"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.046676 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" event={"ID":"dd8b41f5-efda-47ad-83ef-891bcf11fc70","Type":"ContainerStarted","Data":"5f90567c50159bdd82cc36d3751af79abeb1ea9071c7bb6acb075c784520d90b"} Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.047246 4835 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-87z74 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.047295 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.047259 4835 patch_prober.go:28] interesting pod/console-operator-58897d9998-dg8r2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.047691 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" podUID="636b53c1-5764-44a8-99e4-e0b461c55943" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.062266 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sjp7" podStartSLOduration=124.062239279 podStartE2EDuration="2m4.062239279s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:33.05544754 +0000 UTC m=+144.677051620" watchObservedRunningTime="2026-02-02 16:52:33.062239279 +0000 UTC m=+144.683843369" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.099765 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.100885 4835 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.600831173 +0000 UTC m=+145.222435253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.106111 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2pghx" podStartSLOduration=123.106093008 podStartE2EDuration="2m3.106093008s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:33.095134066 +0000 UTC m=+144.716738146" watchObservedRunningTime="2026-02-02 16:52:33.106093008 +0000 UTC m=+144.727697078" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.146144 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-bbqwj" podStartSLOduration=124.146109453 podStartE2EDuration="2m4.146109453s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:33.145040232 +0000 UTC m=+144.766644312" watchObservedRunningTime="2026-02-02 16:52:33.146109453 +0000 UTC m=+144.767713533" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.198510 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-xpn8c" podStartSLOduration=124.198494523 podStartE2EDuration="2m4.198494523s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:33.179218546 +0000 UTC m=+144.800822626" watchObservedRunningTime="2026-02-02 16:52:33.198494523 +0000 UTC m=+144.820098593" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.212677 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.217135 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.71711724 +0000 UTC m=+145.338721320 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.264662 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-52nq4" podStartSLOduration=124.264641436 podStartE2EDuration="2m4.264641436s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:33.260810574 +0000 UTC m=+144.882414654" watchObservedRunningTime="2026-02-02 16:52:33.264641436 +0000 UTC m=+144.886245516" Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.314219 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.314537 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.814511291 +0000 UTC m=+145.436115371 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.314621 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.314910 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.814901953 +0000 UTC m=+145.436506033 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.417625 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.418046 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.918022033 +0000 UTC m=+145.539626113 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.419169 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.419540 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:33.919529427 +0000 UTC m=+145.541133507 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.520213 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.520380 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.02035429 +0000 UTC m=+145.641958360 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.520429 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.520820 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.020808563 +0000 UTC m=+145.642412643 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.626239 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.626912 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.12689655 +0000 UTC m=+145.748500630 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.727756 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.728085 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.228071143 +0000 UTC m=+145.849675223 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.828455 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.829191 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.329169513 +0000 UTC m=+145.950773593 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:33 crc kubenswrapper[4835]: I0202 16:52:33.930137 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:33 crc kubenswrapper[4835]: E0202 16:52:33.930497 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.4304803 +0000 UTC m=+146.052084460 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.025748 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.026066 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.031598 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.032005 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.531991292 +0000 UTC m=+146.153595372 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.061507 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-85m82" event={"ID":"41d9518a-1700-401e-96bd-3e855533c109","Type":"ContainerStarted","Data":"bbb1de3f1b60b46cfb4b43d42febd4316bca4acf5b0a51c40cb7dd5d0e0dc0de"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.070798 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" event={"ID":"102e6fe9-5109-4dc7-a6f3-40a3e1cf32e1","Type":"ContainerStarted","Data":"17fa86fd755d98285d3c0fc0d7758edf0c83d8ff0c66197942975ea67b14d7ae"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.076447 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" event={"ID":"6b48c81d-0f0b-497d-83c4-8f495a859829","Type":"ContainerStarted","Data":"b82096f146bf4f3f3dd31d60176082fc7ad45f75a409518853c1f8e81ce32eb9"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.077181 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-85m82" podStartSLOduration=124.07716316 podStartE2EDuration="2m4.07716316s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.075003796 +0000 UTC m=+145.696607896" watchObservedRunningTime="2026-02-02 16:52:34.07716316 +0000 UTC m=+145.698767240" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.084342 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" event={"ID":"acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f","Type":"ContainerStarted","Data":"454b3c898dfa048e0e267662ded9415c4e939ae47520e6817615dc00504c5bd5"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.085730 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wq452" event={"ID":"d12f85a7-4683-4ad1-aa7c-0c30b52f976f","Type":"ContainerStarted","Data":"64d0b5adc4f4b06cda7a07059ab1b91e1de1e1c64ca449293c92995cb71d33fb"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.091383 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" event={"ID":"a130c6a7-c518-4407-82b0-eb291617a482","Type":"ContainerStarted","Data":"64eae022250d1cb2ab73ddd7ef45421e917b3b2d686c34236f1cf718315e32df"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.091600 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.093484 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-szc4r" podStartSLOduration=125.093472289 podStartE2EDuration="2m5.093472289s" 
podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.091080788 +0000 UTC m=+145.712684878" watchObservedRunningTime="2026-02-02 16:52:34.093472289 +0000 UTC m=+145.715076369" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.100796 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" event={"ID":"aba12881-8bed-4d65-9f58-71b9e848a1cb","Type":"ContainerStarted","Data":"a2dca4d2b600a12c03c69ed0e2b825099eb142a055e432a5e6de3a62a17e6a19"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.100846 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" event={"ID":"aba12881-8bed-4d65-9f58-71b9e848a1cb","Type":"ContainerStarted","Data":"1453bde60a22e296e09de2a403fbca1d4baca97f8aa8a4bcf0a5eb911f7db934"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.105834 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" event={"ID":"9dfad441-aeb8-4d7d-962a-a2b0d352dac0","Type":"ContainerStarted","Data":"1d3b1fd609a3e1628ffd786995c65bb82639ec7dcceac21d967cf2ccd978a325"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.106073 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.110847 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" event={"ID":"c66e4654-4ffd-4a39-8e1e-34d979249c94","Type":"ContainerStarted","Data":"88cada915e72517a64486d60ba82c43330b9b542d88002e532fefd37f5bdecd2"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.115466 4835 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-shcv8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body= Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.115515 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" podUID="9dfad441-aeb8-4d7d-962a-a2b0d352dac0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.125449 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-txgvr" podStartSLOduration=125.125428598 podStartE2EDuration="2m5.125428598s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.109781688 +0000 UTC m=+145.731385788" watchObservedRunningTime="2026-02-02 16:52:34.125428598 +0000 UTC m=+145.747032678" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.129901 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" 
event={"ID":"dd8b41f5-efda-47ad-83ef-891bcf11fc70","Type":"ContainerStarted","Data":"d774a15531f5a83031a514d9d7b56723a95a2b31e3e14dccfac1625ac95748b7"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.130459 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.142415 4835 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-cs4g8 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.142478 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" podUID="dd8b41f5-efda-47ad-83ef-891bcf11fc70" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.144095 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.144438 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.644423186 +0000 UTC m=+146.266027266 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.145398 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wvnqh" podStartSLOduration=125.145383984 podStartE2EDuration="2m5.145383984s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.143382215 +0000 UTC m=+145.764986325" watchObservedRunningTime="2026-02-02 16:52:34.145383984 +0000 UTC m=+145.766988064" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.148034 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" event={"ID":"0d67474c-95af-464f-b92a-4f2bc00dd1fd","Type":"ContainerStarted","Data":"5219781c385b6b91b578780f7c113d67d24c583c1db6ec0d1dfa658369805081"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.161956 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" event={"ID":"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0","Type":"ContainerStarted","Data":"e0df34732ea9dc4a0ee98477943c8f352c4e6bb27bf27c797ff6435d17c02946"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.171525 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-7q9qk" event={"ID":"7b9bef11-2ef9-4bad-b548-c86f910ce019","Type":"ContainerStarted","Data":"68dee99c6c057c90e3caa96602a5e4e1643b66035859874bf56f1fea837b0838"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.182838 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" podStartSLOduration=125.182814694 podStartE2EDuration="2m5.182814694s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.176767766 +0000 UTC m=+145.798371856" watchObservedRunningTime="2026-02-02 16:52:34.182814694 +0000 UTC m=+145.804418774" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.183847 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c4wcz" podStartSLOduration=125.183837924 podStartE2EDuration="2m5.183837924s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.159948742 +0000 UTC m=+145.781552822" watchObservedRunningTime="2026-02-02 16:52:34.183837924 +0000 UTC m=+145.805442004" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.192876 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" 
event={"ID":"99f30a12-2016-4dee-a4cc-02699b58d1a6","Type":"ContainerStarted","Data":"c3a3688e3e94e092534d17b3504b7c6659f5d479858cacbd34d9f5a8d8dace4d"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.195330 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" podStartSLOduration=124.195315811 podStartE2EDuration="2m4.195315811s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.193789656 +0000 UTC m=+145.815393736" watchObservedRunningTime="2026-02-02 16:52:34.195315811 +0000 UTC m=+145.816919891" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.196931 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" event={"ID":"09748673-fe28-490d-84f3-7c0170319531","Type":"ContainerStarted","Data":"5ae124e4a56adc8d4d1e661cb45541c726f37f79dec29725375f5099ae9f776b"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.197815 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.204754 4835 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-s4vdz container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.204804 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" podUID="09748673-fe28-490d-84f3-7c0170319531" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.208052 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" event={"ID":"42bd3315-fcd5-4c23-ada1-0e35123d3b7c","Type":"ContainerStarted","Data":"0e8cdba133f412bcf6aec6bb189f5122dbd737f5d59013ec44d6d788921e05fd"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.208499 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.212542 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" event={"ID":"b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9","Type":"ContainerStarted","Data":"e3d0165a137256349e1a0c62a011749c6e3761980f5f9a9cdaf6c516c4555ead"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.213714 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jsn2h" podStartSLOduration=124.213697161 podStartE2EDuration="2m4.213697161s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.211685702 +0000 UTC m=+145.833289782" watchObservedRunningTime="2026-02-02 
16:52:34.213697161 +0000 UTC m=+145.835301241" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.220505 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tqhv4" event={"ID":"2670e3af-3faf-4aa2-8674-ad7b94955ef0","Type":"ContainerStarted","Data":"6e69cbe3c51d1aea6777b1d129951dae4e61c987ca414823d30b24dbb868e9d3"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.220791 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-tqhv4" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.224010 4835 patch_prober.go:28] interesting pod/downloads-7954f5f757-tqhv4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.224049 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tqhv4" podUID="2670e3af-3faf-4aa2-8674-ad7b94955ef0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.225422 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" event={"ID":"e69421a3-e42a-4e87-a164-486a774179ff","Type":"ContainerStarted","Data":"80f642bfc1de82ed7e1bf2b7e6ce62463e778dff6ead4da256ec5feb4274aaac"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.225468 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" event={"ID":"e69421a3-e42a-4e87-a164-486a774179ff","Type":"ContainerStarted","Data":"dd7e2b371fa41f73edb96cbfb3d133f14fc459770e9ad4a5da7c8a263d055b9b"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.240437 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" podStartSLOduration=124.240415046 podStartE2EDuration="2m4.240415046s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.239827929 +0000 UTC m=+145.861432029" watchObservedRunningTime="2026-02-02 16:52:34.240415046 +0000 UTC m=+145.862019136" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.241641 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" event={"ID":"6b160815-f795-4fb0-8f99-f6b3086709a5","Type":"ContainerStarted","Data":"79d6d81d8b296dc28f8ee9200e45ff9b3689295a1aeaec7353ac37c5f8e6a80c"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.244674 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.245736 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.745706352 +0000 UTC m=+146.367310432 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.256115 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9rdhl" event={"ID":"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1","Type":"ContainerStarted","Data":"07cf542869a41b6b8a0ded1bf7bad591dcb5f92037d041a28ae61c1caf2894aa"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.263687 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" podStartSLOduration=124.263671549 podStartE2EDuration="2m4.263671549s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.262257888 +0000 UTC m=+145.883861978" watchObservedRunningTime="2026-02-02 16:52:34.263671549 +0000 UTC m=+145.885275629" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.284658 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdfkf" podStartSLOduration=125.284637616 podStartE2EDuration="2m5.284637616s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.282503313 +0000 UTC m=+145.904107383" watchObservedRunningTime="2026-02-02 16:52:34.284637616 +0000 UTC m=+145.906241696" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.287418 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-7mhmj" event={"ID":"73c03460-5e98-4103-9da2-de2cf1c391f5","Type":"ContainerStarted","Data":"f9cd41defb483ee61a282334930b4d2ebd4ce72621db4f25f53dadb1f9dfa8b1"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.314060 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" event={"ID":"5213a65c-70c8-4bd5-8f3d-071943782bbf","Type":"ContainerStarted","Data":"8de9d52ea9ce795f7d66e33c53203580525546dcc6033958923b5aff468ffa85"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.315474 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-tqhv4" podStartSLOduration=125.315461831 podStartE2EDuration="2m5.315461831s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.315029228 +0000 UTC m=+145.936633318" watchObservedRunningTime="2026-02-02 16:52:34.315461831 +0000 UTC m=+145.937065911" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.322475 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" event={"ID":"20b9b4db-66e7-4732-986a-5b766e8bc6fb","Type":"ContainerStarted","Data":"baab1e4ff2b7c2ffe6b5655c29eb7487770a1ea30482950c215e18449c5c445b"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.323995 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" event={"ID":"1bfd176d-b2cc-45f8-a80d-61e391f25163","Type":"ContainerStarted","Data":"31db99e716a54ac578b7df24b69eb8645ce3179e1687f718e49010a20ca329e6"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.324547 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.327293 4835 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k5vz8 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.327331 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.328210 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" event={"ID":"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0","Type":"ContainerStarted","Data":"9341bd56003f1c6039b0eeab9f1dfd42f89d6fe0a3b0c408e0548c66e954d440"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.328241 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" event={"ID":"da2cb6be-96b3-4ac1-81ec-ac57cdf853c0","Type":"ContainerStarted","Data":"2a85bded86626046a984e54fd1fdcff82d2cca254eca355fe4e85e4573440571"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.330313 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" event={"ID":"32591974-ca1a-4284-a32d-eb3eb856a9d5","Type":"ContainerStarted","Data":"cb795e0de261ed170c2f64cc120de8ce5a29ea6881601f9544ee25e3a13a34f8"} Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.330750 4835 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-qqjn4 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.330783 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.331055 4835 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-2n9fx container/oauth-openshift 
namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.31:6443/healthz\": dial tcp 10.217.0.31:6443: connect: connection refused" start-of-body= Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.331095 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.31:6443/healthz\": dial tcp 10.217.0.31:6443: connect: connection refused" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.334763 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-7q9qk" podStartSLOduration=7.334746598 podStartE2EDuration="7.334746598s" podCreationTimestamp="2026-02-02 16:52:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.332561584 +0000 UTC m=+145.954165664" watchObservedRunningTime="2026-02-02 16:52:34.334746598 +0000 UTC m=+145.956350698" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.349324 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.350553 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.850541982 +0000 UTC m=+146.472146062 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.382521 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8wthb" podStartSLOduration=125.382499141 podStartE2EDuration="2m5.382499141s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.368560371 +0000 UTC m=+145.990164461" watchObservedRunningTime="2026-02-02 16:52:34.382499141 +0000 UTC m=+146.004103231" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.395373 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" podStartSLOduration=125.395350878 podStartE2EDuration="2m5.395350878s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.383438558 +0000 UTC m=+146.005042638" watchObservedRunningTime="2026-02-02 16:52:34.395350878 +0000 UTC m=+146.016954958" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.397071 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" podStartSLOduration=124.397058909 podStartE2EDuration="2m4.397058909s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.396718589 +0000 UTC m=+146.018322669" watchObservedRunningTime="2026-02-02 16:52:34.397058909 +0000 UTC m=+146.018662999" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.411766 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" podStartSLOduration=124.41175068 podStartE2EDuration="2m4.41175068s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.408995799 +0000 UTC m=+146.030599879" watchObservedRunningTime="2026-02-02 16:52:34.41175068 +0000 UTC m=+146.033354760" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.435327 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" podStartSLOduration=124.435310413 podStartE2EDuration="2m4.435310413s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.432234612 +0000 UTC m=+146.053838692" watchObservedRunningTime="2026-02-02 16:52:34.435310413 +0000 UTC m=+146.056914493" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.447693 4835 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-67bm4" podStartSLOduration=125.447677026 podStartE2EDuration="2m5.447677026s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.446391328 +0000 UTC m=+146.067995428" watchObservedRunningTime="2026-02-02 16:52:34.447677026 +0000 UTC m=+146.069281106" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.451957 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.452146 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.952126637 +0000 UTC m=+146.573730717 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.452762 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.466197 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:34.965963693 +0000 UTC m=+146.587567783 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.479677 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-smljg" podStartSLOduration=124.479659156 podStartE2EDuration="2m4.479659156s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.478648906 +0000 UTC m=+146.100252986" watchObservedRunningTime="2026-02-02 16:52:34.479659156 +0000 UTC m=+146.101263236" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.499075 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9gvn" podStartSLOduration=125.499054345 podStartE2EDuration="2m5.499054345s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.497032546 +0000 UTC m=+146.118636616" watchObservedRunningTime="2026-02-02 16:52:34.499054345 +0000 UTC m=+146.120658425" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.520701 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-7mhmj" podStartSLOduration=6.520683441 podStartE2EDuration="6.520683441s" podCreationTimestamp="2026-02-02 16:52:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.519073404 +0000 UTC m=+146.140677494" watchObservedRunningTime="2026-02-02 16:52:34.520683441 +0000 UTC m=+146.142287531" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.550762 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" podStartSLOduration=125.550730434 podStartE2EDuration="2m5.550730434s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.549282271 +0000 UTC m=+146.170886341" watchObservedRunningTime="2026-02-02 16:52:34.550730434 +0000 UTC m=+146.172334514" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.554250 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.554482 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.054467234 +0000 UTC m=+146.676071314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.554677 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.554989 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.054982809 +0000 UTC m=+146.676586889 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.579529 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-76pc7" podStartSLOduration=125.579511659 podStartE2EDuration="2m5.579511659s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:34.576332176 +0000 UTC m=+146.197936256" watchObservedRunningTime="2026-02-02 16:52:34.579511659 +0000 UTC m=+146.201115739" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.655729 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.656384 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.156369558 +0000 UTC m=+146.777973638 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.758349 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.758761 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.258743456 +0000 UTC m=+146.880347536 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.779943 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.859790 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.860004 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.35997135 +0000 UTC m=+146.981575430 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.860936 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.360917418 +0000 UTC m=+146.982521498 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.861172 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.962183 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.962327 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.462303637 +0000 UTC m=+147.083907717 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:34 crc kubenswrapper[4835]: I0202 16:52:34.962719 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:34 crc kubenswrapper[4835]: E0202 16:52:34.963154 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.463135201 +0000 UTC m=+147.084739281 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.032128 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:35 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:35 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:35 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.032204 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.064139 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.064357 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.564331963 +0000 UTC m=+147.185936043 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.064437 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.064763 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.564753776 +0000 UTC m=+147.186357856 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.165935 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.166108 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.666077883 +0000 UTC m=+147.287681963 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.166563 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.166942 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.666930418 +0000 UTC m=+147.288534498 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.267757 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.267872 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.767853463 +0000 UTC m=+147.389457543 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.268146 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.268445 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.76843754 +0000 UTC m=+147.390041620 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.335956 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" event={"ID":"2b288d3c-3c0e-4290-819a-d5c1e7eedcf0","Type":"ContainerStarted","Data":"f9d64d4ee6c1ed1918fd21297d37ef768b54e1c500c8f5dea668055ae8205dc0"} Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.357506 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wq452" event={"ID":"d12f85a7-4683-4ad1-aa7c-0c30b52f976f","Type":"ContainerStarted","Data":"d785514dabf37893d9dd99c02986f7de802140c2b14600d128fc1659218c83bb"} Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.360159 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9rdhl" event={"ID":"b0dd1cd6-a9c4-4dda-8383-7e1d69ac6ca1","Type":"ContainerStarted","Data":"097aa1116013e2b7c1b5dd602d619abe843769fad6f6e1e6edd697102dd8ea46"} Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.360331 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.362195 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nwzzt" event={"ID":"20b9b4db-66e7-4732-986a-5b766e8bc6fb","Type":"ContainerStarted","Data":"4f1b683473e30c4fb7eea8e78a64296dcff59f14f0cb22ae8f5afbb502d856eb"} Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.363679 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wq59n" event={"ID":"9e36008e-f103-4e5a-9543-6d3ef330d446","Type":"ContainerStarted","Data":"889f8ac208ba71d13aa13209bbf8ca67c05be82b009bc4025fc7be26034236aa"} Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.364160 4835 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-shcv8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body= Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.364193 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" podUID="9dfad441-aeb8-4d7d-962a-a2b0d352dac0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.364769 4835 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-cs4g8 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.364787 4835 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" podUID="dd8b41f5-efda-47ad-83ef-891bcf11fc70" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.365603 4835 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-s4vdz container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.365648 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" podUID="09748673-fe28-490d-84f3-7c0170319531" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.365990 4835 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k5vz8 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.366013 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.366065 4835 patch_prober.go:28] interesting pod/downloads-7954f5f757-tqhv4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.366096 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tqhv4" podUID="2670e3af-3faf-4aa2-8674-ad7b94955ef0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.369049 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.369443 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.869426268 +0000 UTC m=+147.491030338 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.381402 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-cs52k" podStartSLOduration=125.381384019 podStartE2EDuration="2m5.381384019s" podCreationTimestamp="2026-02-02 16:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:35.379337859 +0000 UTC m=+147.000941939" watchObservedRunningTime="2026-02-02 16:52:35.381384019 +0000 UTC m=+147.002988099" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.439862 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-wq452" podStartSLOduration=126.439842757 podStartE2EDuration="2m6.439842757s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:35.439539738 +0000 UTC m=+147.061143818" watchObservedRunningTime="2026-02-02 16:52:35.439842757 +0000 UTC m=+147.061446837" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.444819 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.444875 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.445998 4835 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-s2f6g container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.9:8443/livez\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.446061 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" podUID="b6f3dc6d-8984-420d-81cd-e2a2db1d4bd9" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.9:8443/livez\": dial tcp 10.217.0.9:8443: connect: connection refused" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.460192 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-9rdhl" podStartSLOduration=7.460173644 podStartE2EDuration="7.460173644s" podCreationTimestamp="2026-02-02 16:52:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:35.458705631 +0000 UTC m=+147.080309721" watchObservedRunningTime="2026-02-02 16:52:35.460173644 +0000 UTC m=+147.081777724" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.471697 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.474095 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:35.974083143 +0000 UTC m=+147.595687223 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.576953 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.577133 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.07710513 +0000 UTC m=+147.698709220 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.577562 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.577967 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.077952234 +0000 UTC m=+147.699556314 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.609004 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.678867 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.678969 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.178954272 +0000 UTC m=+147.800558352 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.679172 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.679611 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.179595651 +0000 UTC m=+147.801199731 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.780768 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.780967 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.280938079 +0000 UTC m=+147.902542159 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.781133 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.781539 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.281524496 +0000 UTC m=+147.903128576 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.810755 4835 csr.go:261] certificate signing request csr-d69cm is approved, waiting to be issued Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.818887 4835 csr.go:257] certificate signing request csr-d69cm is issued Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.882470 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.882610 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.382590445 +0000 UTC m=+148.004194535 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.882705 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.883051 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.383041989 +0000 UTC m=+148.004646069 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:35 crc kubenswrapper[4835]: I0202 16:52:35.983921 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:35 crc kubenswrapper[4835]: E0202 16:52:35.984352 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.484333575 +0000 UTC m=+148.105937655 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.024770 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:36 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:36 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:36 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.024823 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.085729 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.086146 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.586132066 +0000 UTC m=+148.207736146 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.187191 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.187908 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.687886795 +0000 UTC m=+148.309490895 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.203298 4835 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-2w4fx container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.203345 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" podUID="a130c6a7-c518-4407-82b0-eb291617a482" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.203369 4835 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-2w4fx container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.203391 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" podUID="a130c6a7-c518-4407-82b0-eb291617a482" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.288900 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.289214 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.789200802 +0000 UTC m=+148.410804882 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.372633 4835 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k5vz8 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.372675 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.377914 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s4vdz" Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.390768 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.391153 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.891134787 +0000 UTC m=+148.512738867 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.492791 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.495255 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:36.995240966 +0000 UTC m=+148.616845046 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.594492 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.594708 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.094681208 +0000 UTC m=+148.716285288 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.594781 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.595040 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.095027488 +0000 UTC m=+148.716631568 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.696346 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.696727 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.196711356 +0000 UTC m=+148.818315436 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.797896 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.798339 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.298322511 +0000 UTC m=+148.919926591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.819981 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-02 16:47:35 +0000 UTC, rotation deadline is 2026-11-18 22:55:39.700484186 +0000 UTC Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.820024 4835 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6942h3m2.880463537s for next certificate rotation Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.899452 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.899637 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.399610437 +0000 UTC m=+149.021214517 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:36 crc kubenswrapper[4835]: I0202 16:52:36.899778 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:36 crc kubenswrapper[4835]: E0202 16:52:36.900096 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.400087891 +0000 UTC m=+149.021692051 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.000976 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.001112 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.501091479 +0000 UTC m=+149.122695559 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.001214 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.001568 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.501559823 +0000 UTC m=+149.123163903 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.028435 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:37 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:37 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:37 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.028503 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.102334 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.102567 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.102767 4835 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.602741115 +0000 UTC m=+149.224345195 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.103284 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.203893 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.203962 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.203990 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.204009 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.204415 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.704397192 +0000 UTC m=+149.326001272 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.212954 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.216888 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.227410 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.304841 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.305002 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.804978898 +0000 UTC m=+149.426582978 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.305181 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.305514 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.805506903 +0000 UTC m=+149.427110983 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.321381 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.328714 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.382648 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.383981 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.393222 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.393423 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.406200 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.406414 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.406750 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:37.906735457 +0000 UTC m=+149.528339537 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.492239 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5cbxv"] Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.493104 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.501366 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.510349 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64046685-4d75-4e50-9645-b359bb38b948-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"64046685-4d75-4e50-9645-b359bb38b948\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.510450 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.510480 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64046685-4d75-4e50-9645-b359bb38b948-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"64046685-4d75-4e50-9645-b359bb38b948\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.511383 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.011367942 +0000 UTC m=+149.632972022 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.511714 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.524095 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5cbxv"] Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.617984 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.618189 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.118160149 +0000 UTC m=+149.739764239 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.618233 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.618293 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64046685-4d75-4e50-9645-b359bb38b948-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"64046685-4d75-4e50-9645-b359bb38b948\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.618378 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64046685-4d75-4e50-9645-b359bb38b948-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"64046685-4d75-4e50-9645-b359bb38b948\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.618406 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-utilities\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.618431 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx9wv\" (UniqueName: \"kubernetes.io/projected/29dff398-e620-4558-854e-3e9fb13f1b25-kube-api-access-xx9wv\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.618470 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-catalog-content\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.618772 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.118765237 +0000 UTC m=+149.740369317 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.618913 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64046685-4d75-4e50-9645-b359bb38b948-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"64046685-4d75-4e50-9645-b359bb38b948\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.671121 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64046685-4d75-4e50-9645-b359bb38b948-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"64046685-4d75-4e50-9645-b359bb38b948\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.689541 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jq6s4"] Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.693427 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.698455 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.704104 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jq6s4"] Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.720186 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.720448 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-utilities\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.720482 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx9wv\" (UniqueName: \"kubernetes.io/projected/29dff398-e620-4558-854e-3e9fb13f1b25-kube-api-access-xx9wv\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.720520 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-catalog-content\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 
02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.720979 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-catalog-content\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.721048 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.221032692 +0000 UTC m=+149.842636772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.721244 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-utilities\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.723824 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.766993 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx9wv\" (UniqueName: \"kubernetes.io/projected/29dff398-e620-4558-854e-3e9fb13f1b25-kube-api-access-xx9wv\") pod \"certified-operators-5cbxv\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.810559 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.822649 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-utilities\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.822751 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.822783 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-catalog-content\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.822802 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69xjz\" (UniqueName: \"kubernetes.io/projected/5eecd945-3eb3-4384-9836-c1a65b49063f-kube-api-access-69xjz\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.823125 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.323113001 +0000 UTC m=+149.944717081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.877860 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q9mlm"] Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.878737 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.909752 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q9mlm"] Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.924802 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.925057 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-catalog-content\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.925089 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69xjz\" (UniqueName: \"kubernetes.io/projected/5eecd945-3eb3-4384-9836-c1a65b49063f-kube-api-access-69xjz\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.925127 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-utilities\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:37 crc kubenswrapper[4835]: E0202 16:52:37.925649 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.425625893 +0000 UTC m=+150.047229963 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.926003 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-utilities\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:37 crc kubenswrapper[4835]: I0202 16:52:37.926369 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-catalog-content\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.026299 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-utilities\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.026365 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-catalog-content\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.026397 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.026444 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-478rx\" (UniqueName: \"kubernetes.io/projected/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-kube-api-access-478rx\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.026702 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.526691713 +0000 UTC m=+150.148295793 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.043529 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69xjz\" (UniqueName: \"kubernetes.io/projected/5eecd945-3eb3-4384-9836-c1a65b49063f-kube-api-access-69xjz\") pod \"community-operators-jq6s4\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.043812 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:38 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:38 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:38 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.043856 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.084233 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v9gmt"] Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.085486 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.126995 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.127252 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-utilities\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.127310 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-catalog-content\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.127366 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-478rx\" (UniqueName: \"kubernetes.io/projected/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-kube-api-access-478rx\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.127708 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.62769511 +0000 UTC m=+150.249299190 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.128054 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-utilities\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.128302 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-catalog-content\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.165604 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-478rx\" (UniqueName: \"kubernetes.io/projected/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-kube-api-access-478rx\") pod \"certified-operators-q9mlm\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.206540 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v9gmt"] Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.228556 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-utilities\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.228637 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.228713 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-catalog-content\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.228754 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8vcw\" (UniqueName: \"kubernetes.io/projected/a810d18d-1792-4733-9325-5960e398f425-kube-api-access-x8vcw\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.229178 
4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.729162902 +0000 UTC m=+150.350766982 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.237709 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.319667 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.336648 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.336925 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8vcw\" (UniqueName: \"kubernetes.io/projected/a810d18d-1792-4733-9325-5960e398f425-kube-api-access-x8vcw\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.337006 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-utilities\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.337102 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-catalog-content\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.337629 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-catalog-content\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.337747 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.837725661 +0000 UTC m=+150.459329741 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.338364 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-utilities\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.398549 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8vcw\" (UniqueName: \"kubernetes.io/projected/a810d18d-1792-4733-9325-5960e398f425-kube-api-access-x8vcw\") pod \"community-operators-v9gmt\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.428721 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.436410 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8b5574f8a917e1576dd3252b99b51ab9cd02d0e381bc9cd5a9143842dc4de680"} Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.439803 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.440092 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:38.940077519 +0000 UTC m=+150.561681599 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.517728 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wq59n" event={"ID":"9e36008e-f103-4e5a-9543-6d3ef330d446","Type":"ContainerStarted","Data":"0226918b3452339a6afa7ac2c19b37b65defb7faa4c28e472d7ac928422d3305"} Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.535200 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.540430 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.540454 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"01e2b65e0ff7bbe55aeebd514bc792e9ab0d655dc55598cb59b72f338a2bae45"} Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.540804 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.040772317 +0000 UTC m=+150.662376397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.540963 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.541677 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.041489308 +0000 UTC m=+150.663093388 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.642218 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.642819 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.142804864 +0000 UTC m=+150.764408944 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.685800 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5cbxv"] Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.743660 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.744228 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.244215114 +0000 UTC m=+150.865819194 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.859777 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.860204 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.360179611 +0000 UTC m=+150.981783691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.865067 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q9mlm"] Feb 02 16:52:38 crc kubenswrapper[4835]: W0202 16:52:38.894252 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4ac95a5_3112_4034_ab53_40d9c5cf13ac.slice/crio-848245bf5926a9c355ece925a068a8a54782782e4b85cb32c368857ccf7dcd17 WatchSource:0}: Error finding container 848245bf5926a9c355ece925a068a8a54782782e4b85cb32c368857ccf7dcd17: Status 404 returned error can't find the container with id 848245bf5926a9c355ece925a068a8a54782782e4b85cb32c368857ccf7dcd17 Feb 02 16:52:38 crc kubenswrapper[4835]: I0202 16:52:38.961044 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:38 crc kubenswrapper[4835]: E0202 16:52:38.961472 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.461456627 +0000 UTC m=+151.083060707 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.026774 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:39 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:39 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:39 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.026828 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.061839 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:39 crc kubenswrapper[4835]: E0202 16:52:39.062252 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.562233098 +0000 UTC m=+151.183837188 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.083880 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jq6s4"] Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.164085 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:39 crc kubenswrapper[4835]: E0202 16:52:39.164639 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.664628506 +0000 UTC m=+151.286232586 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.209796 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v9gmt"] Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.222480 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2w4fx" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.266867 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:39 crc kubenswrapper[4835]: E0202 16:52:39.267250 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.767228361 +0000 UTC m=+151.388832441 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.267505 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:39 crc kubenswrapper[4835]: E0202 16:52:39.267748 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.767741176 +0000 UTC m=+151.389345256 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-sfbf9" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.276299 4835 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.357110 4835 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-02T16:52:39.276317508Z","Handler":null,"Name":""} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.368120 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:39 crc kubenswrapper[4835]: E0202 16:52:39.368432 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 16:52:39.868410684 +0000 UTC m=+151.490014764 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.369826 4835 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.369863 4835 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.459578 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w9l6m"] Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.460924 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.471669 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.473433 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.474981 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w9l6m"] Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.479049 4835 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.479090 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.529722 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-sfbf9\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.554330 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.559604 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f79f8961e29b0d5400dd93a1c096f491d2b97ad17b886e9fe45f811a37c21f01"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.564168 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c32e5b30a65a63507f48e0e78b1101c1b04f5293245a6ac4aa56c2d8909bc4ea"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.564210 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d02a3b2134f861ac37aa0a75d7edd48778d85f7d26fa2b4cac94186d1822afd8"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.564706 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.570985 4835 generic.go:334] "Generic (PLEG): container finished" podID="29dff398-e620-4558-854e-3e9fb13f1b25" containerID="363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045" exitCode=0 Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.571325 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5cbxv" event={"ID":"29dff398-e620-4558-854e-3e9fb13f1b25","Type":"ContainerDied","Data":"363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.571387 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5cbxv" event={"ID":"29dff398-e620-4558-854e-3e9fb13f1b25","Type":"ContainerStarted","Data":"cd0d6dacbe048e20bc7e14c31dc877ec938a45e88fd03da2efa2185d97e06e3a"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.572723 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.575456 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.576180 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-catalog-content\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.576311 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fmd7\" (UniqueName: \"kubernetes.io/projected/4b92b257-9045-493d-9c64-0e3660e8513a-kube-api-access-6fmd7\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " 
pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.576944 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-utilities\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.577811 4835 generic.go:334] "Generic (PLEG): container finished" podID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerID="7160b0ddc6c1e9d747f6356dc068f09629beb43d94e29368039bf99287224b3c" exitCode=0 Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.578124 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jq6s4" event={"ID":"5eecd945-3eb3-4384-9836-c1a65b49063f","Type":"ContainerDied","Data":"7160b0ddc6c1e9d747f6356dc068f09629beb43d94e29368039bf99287224b3c"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.578181 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jq6s4" event={"ID":"5eecd945-3eb3-4384-9836-c1a65b49063f","Type":"ContainerStarted","Data":"201240fd3393adbd5b80ea8fd9e7522edb3592122d19ac4311c4a8f368bfb740"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.580327 4835 generic.go:334] "Generic (PLEG): container finished" podID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerID="a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5" exitCode=0 Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.580423 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9mlm" event={"ID":"b4ac95a5-3112-4034-ab53-40d9c5cf13ac","Type":"ContainerDied","Data":"a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.580460 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9mlm" event={"ID":"b4ac95a5-3112-4034-ab53-40d9c5cf13ac","Type":"ContainerStarted","Data":"848245bf5926a9c355ece925a068a8a54782782e4b85cb32c368857ccf7dcd17"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.584625 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wq59n" event={"ID":"9e36008e-f103-4e5a-9543-6d3ef330d446","Type":"ContainerStarted","Data":"a2e8727dac91b6bc16e9ab836d3a65fca055699fd5922d2016b9374f40186d3c"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.584668 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wq59n" event={"ID":"9e36008e-f103-4e5a-9543-6d3ef330d446","Type":"ContainerStarted","Data":"d703c6c4166fe2be3fceb62b61e31c5f89469e755ebc1c7fd052886f05a6dd4d"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.587379 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4e72c94d0045436bd80944a762621ab7a93002a514117ff7f03940038a5060b2"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.589053 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"64046685-4d75-4e50-9645-b359bb38b948","Type":"ContainerStarted","Data":"ce2835f776d7eea15fb826246f59369f66392bd6787c6494ff43b64d2aa6cea3"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.589079 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"64046685-4d75-4e50-9645-b359bb38b948","Type":"ContainerStarted","Data":"73a76a7c9239b4cfe5dbbcce21a7de255ddb4b99adbe0cc1916f9269f8cf7c1b"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.590563 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9gmt" event={"ID":"a810d18d-1792-4733-9325-5960e398f425","Type":"ContainerStarted","Data":"2f092efd10bbe80b3a035c0cc53be0f017e2c9a7ffcbdbb98d5bd8ea0d489779"} Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.603033 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.686016 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-utilities\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.686087 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-catalog-content\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.686137 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fmd7\" (UniqueName: \"kubernetes.io/projected/4b92b257-9045-493d-9c64-0e3660e8513a-kube-api-access-6fmd7\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.686476 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-utilities\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.687257 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-catalog-content\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.701572 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-wq59n" podStartSLOduration=11.701554432 podStartE2EDuration="11.701554432s" podCreationTimestamp="2026-02-02 
16:52:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:39.700835441 +0000 UTC m=+151.322439521" watchObservedRunningTime="2026-02-02 16:52:39.701554432 +0000 UTC m=+151.323158502" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.722922 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.722900409 podStartE2EDuration="2.722900409s" podCreationTimestamp="2026-02-02 16:52:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:39.719804078 +0000 UTC m=+151.341408158" watchObservedRunningTime="2026-02-02 16:52:39.722900409 +0000 UTC m=+151.344504489" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.724542 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fmd7\" (UniqueName: \"kubernetes.io/projected/4b92b257-9045-493d-9c64-0e3660e8513a-kube-api-access-6fmd7\") pod \"redhat-marketplace-w9l6m\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.838018 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sfbf9"] Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.842603 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:52:39 crc kubenswrapper[4835]: W0202 16:52:39.853985 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2ba2804_15a2_4d2c_b0db_a2b5f24bf2a1.slice/crio-78016984562ae2d46ff4893e9323de65048cc84d85e2fdd1cabf887b1244f42e WatchSource:0}: Error finding container 78016984562ae2d46ff4893e9323de65048cc84d85e2fdd1cabf887b1244f42e: Status 404 returned error can't find the container with id 78016984562ae2d46ff4893e9323de65048cc84d85e2fdd1cabf887b1244f42e Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.855187 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8bccz"] Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.856301 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.870256 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8bccz"] Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.991650 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c4pc\" (UniqueName: \"kubernetes.io/projected/56962d65-7f11-44f4-b09c-73302933d1a8-kube-api-access-6c4pc\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.991684 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-utilities\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:39 crc kubenswrapper[4835]: I0202 16:52:39.991724 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-catalog-content\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.021414 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.021447 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.026583 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:40 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:40 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:40 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.026623 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.045536 4835 patch_prober.go:28] interesting pod/apiserver-76f77b778f-wq452 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]log ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]etcd ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/start-apiserver-admission-initializer ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/generic-apiserver-start-informers ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/max-in-flight-filter ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/storage-object-count-tracker-hook ok Feb 02 16:52:40 crc kubenswrapper[4835]: 
[+]poststarthook/image.openshift.io-apiserver-caches ok Feb 02 16:52:40 crc kubenswrapper[4835]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Feb 02 16:52:40 crc kubenswrapper[4835]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/project.openshift.io-projectcache ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/openshift.io-startinformers ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/openshift.io-restmapperupdater ok Feb 02 16:52:40 crc kubenswrapper[4835]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Feb 02 16:52:40 crc kubenswrapper[4835]: livez check failed Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.045594 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-wq452" podUID="d12f85a7-4683-4ad1-aa7c-0c30b52f976f" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.067468 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w9l6m"] Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.077810 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.093053 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c4pc\" (UniqueName: \"kubernetes.io/projected/56962d65-7f11-44f4-b09c-73302933d1a8-kube-api-access-6c4pc\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.093098 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-utilities\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.093138 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-catalog-content\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.093665 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-catalog-content\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.093806 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-utilities\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:40 crc kubenswrapper[4835]: 
I0202 16:52:40.116478 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c4pc\" (UniqueName: \"kubernetes.io/projected/56962d65-7f11-44f4-b09c-73302933d1a8-kube-api-access-6c4pc\") pod \"redhat-marketplace-8bccz\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.190906 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.254067 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-dg8r2" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.420200 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8bccz"] Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.458480 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.465622 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s2f6g" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.471761 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.584482 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.585020 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.598533 4835 patch_prober.go:28] interesting pod/console-f9d7485db-xpn8c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.598582 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xpn8c" podUID="aad2f2e8-6800-4238-a0ab-ee3304bad4c1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.626493 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bccz" event={"ID":"56962d65-7f11-44f4-b09c-73302933d1a8","Type":"ContainerStarted","Data":"ce005b54e492d76c159b4acbf855108cdb545ed9c0b62bac20f0fdce6975c5fa"} Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.633349 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" event={"ID":"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1","Type":"ContainerStarted","Data":"0f8773226ce829e53ffa90f6677a610fb4607d3fb50ba119d06845dd2095ebd3"} Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.633404 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" 
event={"ID":"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1","Type":"ContainerStarted","Data":"78016984562ae2d46ff4893e9323de65048cc84d85e2fdd1cabf887b1244f42e"} Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.634248 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.641231 4835 generic.go:334] "Generic (PLEG): container finished" podID="a810d18d-1792-4733-9325-5960e398f425" containerID="21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47" exitCode=0 Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.641577 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9gmt" event={"ID":"a810d18d-1792-4733-9325-5960e398f425","Type":"ContainerDied","Data":"21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47"} Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.661521 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" podStartSLOduration=131.661496347 podStartE2EDuration="2m11.661496347s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:40.658813388 +0000 UTC m=+152.280417488" watchObservedRunningTime="2026-02-02 16:52:40.661496347 +0000 UTC m=+152.283100427" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.673785 4835 patch_prober.go:28] interesting pod/downloads-7954f5f757-tqhv4 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.673859 4835 patch_prober.go:28] interesting pod/downloads-7954f5f757-tqhv4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.673896 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tqhv4" podUID="2670e3af-3faf-4aa2-8674-ad7b94955ef0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.675864 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tqhv4" podUID="2670e3af-3faf-4aa2-8674-ad7b94955ef0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.685394 4835 generic.go:334] "Generic (PLEG): container finished" podID="64046685-4d75-4e50-9645-b359bb38b948" containerID="ce2835f776d7eea15fb826246f59369f66392bd6787c6494ff43b64d2aa6cea3" exitCode=0 Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.685633 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"64046685-4d75-4e50-9645-b359bb38b948","Type":"ContainerDied","Data":"ce2835f776d7eea15fb826246f59369f66392bd6787c6494ff43b64d2aa6cea3"} Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 
16:52:40.704081 4835 generic.go:334] "Generic (PLEG): container finished" podID="4b92b257-9045-493d-9c64-0e3660e8513a" containerID="88b375175c8d07d9509a593ea8e6e22ff0213a2aece018b34b14e50e4201da4c" exitCode=0 Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.705727 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w9l6m" event={"ID":"4b92b257-9045-493d-9c64-0e3660e8513a","Type":"ContainerDied","Data":"88b375175c8d07d9509a593ea8e6e22ff0213a2aece018b34b14e50e4201da4c"} Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.705763 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w9l6m" event={"ID":"4b92b257-9045-493d-9c64-0e3660e8513a","Type":"ContainerStarted","Data":"425afc8d3a1a2edd1234dcc683c8d11c2625455132006f7249efa56e634f628c"} Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.772651 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-shcv8" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.880542 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-56ncc"] Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.884854 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.891245 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 16:52:40 crc kubenswrapper[4835]: I0202 16:52:40.898597 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-56ncc"] Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.012984 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cs4g8" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.014164 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-catalog-content\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.014224 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r76qs\" (UniqueName: \"kubernetes.io/projected/cd498040-6af5-4953-8b1c-ea3803ba1b2a-kube-api-access-r76qs\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.014258 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-utilities\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.022377 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.028136 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj 
container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:41 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:41 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:41 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.028304 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.079434 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.117938 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-catalog-content\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.117990 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r76qs\" (UniqueName: \"kubernetes.io/projected/cd498040-6af5-4953-8b1c-ea3803ba1b2a-kube-api-access-r76qs\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.118045 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-utilities\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.119020 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-utilities\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.119237 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-catalog-content\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.168793 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r76qs\" (UniqueName: \"kubernetes.io/projected/cd498040-6af5-4953-8b1c-ea3803ba1b2a-kube-api-access-r76qs\") pod \"redhat-operators-56ncc\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.205854 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.263286 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.263699 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k2lvh"] Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.264759 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.282795 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k2lvh"] Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.422264 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gms2p\" (UniqueName: \"kubernetes.io/projected/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-kube-api-access-gms2p\") pod \"redhat-operators-k2lvh\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.422352 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-catalog-content\") pod \"redhat-operators-k2lvh\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.422577 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-utilities\") pod \"redhat-operators-k2lvh\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.524531 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gms2p\" (UniqueName: \"kubernetes.io/projected/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-kube-api-access-gms2p\") pod \"redhat-operators-k2lvh\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.524581 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-catalog-content\") pod \"redhat-operators-k2lvh\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.524624 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-utilities\") pod \"redhat-operators-k2lvh\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.525115 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-utilities\") pod \"redhat-operators-k2lvh\" (UID: 
\"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.525881 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-catalog-content\") pod \"redhat-operators-k2lvh\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.543105 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gms2p\" (UniqueName: \"kubernetes.io/projected/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-kube-api-access-gms2p\") pod \"redhat-operators-k2lvh\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.564294 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-56ncc"] Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.661986 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.714075 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-56ncc" event={"ID":"cd498040-6af5-4953-8b1c-ea3803ba1b2a","Type":"ContainerStarted","Data":"70d6d6c958a7269694d4f3f3c7e2aece40f4a5c0e96ef8536a90e9ac26dbacfb"} Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.717407 4835 generic.go:334] "Generic (PLEG): container finished" podID="56962d65-7f11-44f4-b09c-73302933d1a8" containerID="41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1" exitCode=0 Feb 02 16:52:41 crc kubenswrapper[4835]: I0202 16:52:41.717753 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bccz" event={"ID":"56962d65-7f11-44f4-b09c-73302933d1a8","Type":"ContainerDied","Data":"41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1"} Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.014983 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k2lvh"] Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.026088 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:42 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:42 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:42 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.026146 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.149546 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.238781 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64046685-4d75-4e50-9645-b359bb38b948-kubelet-dir\") pod \"64046685-4d75-4e50-9645-b359bb38b948\" (UID: \"64046685-4d75-4e50-9645-b359bb38b948\") " Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.238917 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64046685-4d75-4e50-9645-b359bb38b948-kube-api-access\") pod \"64046685-4d75-4e50-9645-b359bb38b948\" (UID: \"64046685-4d75-4e50-9645-b359bb38b948\") " Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.239243 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64046685-4d75-4e50-9645-b359bb38b948-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "64046685-4d75-4e50-9645-b359bb38b948" (UID: "64046685-4d75-4e50-9645-b359bb38b948"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.246752 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64046685-4d75-4e50-9645-b359bb38b948-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "64046685-4d75-4e50-9645-b359bb38b948" (UID: "64046685-4d75-4e50-9645-b359bb38b948"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.340210 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64046685-4d75-4e50-9645-b359bb38b948-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.340244 4835 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64046685-4d75-4e50-9645-b359bb38b948-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.727795 4835 generic.go:334] "Generic (PLEG): container finished" podID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerID="a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845" exitCode=0 Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.727880 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k2lvh" event={"ID":"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed","Type":"ContainerDied","Data":"a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845"} Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.728293 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k2lvh" event={"ID":"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed","Type":"ContainerStarted","Data":"1d9cfa29b2a5447115419257f8efad811bb647aa2922c3dd64f5c13a90d3125d"} Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.732325 4835 generic.go:334] "Generic (PLEG): container finished" podID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerID="2d39276d62ff338c21add11ebc8211f6b33f5d7b15835598fe73e02e70a76d62" exitCode=0 Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.732396 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-56ncc" 
event={"ID":"cd498040-6af5-4953-8b1c-ea3803ba1b2a","Type":"ContainerDied","Data":"2d39276d62ff338c21add11ebc8211f6b33f5d7b15835598fe73e02e70a76d62"} Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.736469 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"64046685-4d75-4e50-9645-b359bb38b948","Type":"ContainerDied","Data":"73a76a7c9239b4cfe5dbbcce21a7de255ddb4b99adbe0cc1916f9269f8cf7c1b"} Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.736507 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73a76a7c9239b4cfe5dbbcce21a7de255ddb4b99adbe0cc1916f9269f8cf7c1b" Feb 02 16:52:42 crc kubenswrapper[4835]: I0202 16:52:42.736487 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.025784 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:43 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:43 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:43 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.025839 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.424693 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 16:52:43 crc kubenswrapper[4835]: E0202 16:52:43.424930 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64046685-4d75-4e50-9645-b359bb38b948" containerName="pruner" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.424941 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="64046685-4d75-4e50-9645-b359bb38b948" containerName="pruner" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.425026 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="64046685-4d75-4e50-9645-b359bb38b948" containerName="pruner" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.429611 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.433536 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.434169 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.494604 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.568403 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/020181c9-f118-4579-b4f8-6755c4f74d81-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"020181c9-f118-4579-b4f8-6755c4f74d81\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.568556 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/020181c9-f118-4579-b4f8-6755c4f74d81-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"020181c9-f118-4579-b4f8-6755c4f74d81\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.669756 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/020181c9-f118-4579-b4f8-6755c4f74d81-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"020181c9-f118-4579-b4f8-6755c4f74d81\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.669824 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/020181c9-f118-4579-b4f8-6755c4f74d81-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"020181c9-f118-4579-b4f8-6755c4f74d81\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.669822 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/020181c9-f118-4579-b4f8-6755c4f74d81-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"020181c9-f118-4579-b4f8-6755c4f74d81\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.689249 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/020181c9-f118-4579-b4f8-6755c4f74d81-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"020181c9-f118-4579-b4f8-6755c4f74d81\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.758006 4835 generic.go:334] "Generic (PLEG): container finished" podID="0d67474c-95af-464f-b92a-4f2bc00dd1fd" containerID="5219781c385b6b91b578780f7c113d67d24c583c1db6ec0d1dfa658369805081" exitCode=0 Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.758084 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" 
event={"ID":"0d67474c-95af-464f-b92a-4f2bc00dd1fd","Type":"ContainerDied","Data":"5219781c385b6b91b578780f7c113d67d24c583c1db6ec0d1dfa658369805081"} Feb 02 16:52:43 crc kubenswrapper[4835]: I0202 16:52:43.802500 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:44 crc kubenswrapper[4835]: I0202 16:52:44.026315 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:44 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:44 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:44 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:44 crc kubenswrapper[4835]: I0202 16:52:44.026370 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:44 crc kubenswrapper[4835]: I0202 16:52:44.411378 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 16:52:44 crc kubenswrapper[4835]: I0202 16:52:44.779136 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"020181c9-f118-4579-b4f8-6755c4f74d81","Type":"ContainerStarted","Data":"ab79860b3cdb633efb5a8b986c5580c96bedde1d83cdb2b679d15b448b89821d"} Feb 02 16:52:44 crc kubenswrapper[4835]: I0202 16:52:44.872122 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 16:52:44 crc kubenswrapper[4835]: I0202 16:52:44.872222 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.028989 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:45 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:45 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:45 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.029097 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.029352 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.034400 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-apiserver/apiserver-76f77b778f-wq452" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.133384 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.308506 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hprv6\" (UniqueName: \"kubernetes.io/projected/0d67474c-95af-464f-b92a-4f2bc00dd1fd-kube-api-access-hprv6\") pod \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.308809 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d67474c-95af-464f-b92a-4f2bc00dd1fd-config-volume\") pod \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.308840 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d67474c-95af-464f-b92a-4f2bc00dd1fd-secret-volume\") pod \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\" (UID: \"0d67474c-95af-464f-b92a-4f2bc00dd1fd\") " Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.310138 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d67474c-95af-464f-b92a-4f2bc00dd1fd-config-volume" (OuterVolumeSpecName: "config-volume") pod "0d67474c-95af-464f-b92a-4f2bc00dd1fd" (UID: "0d67474c-95af-464f-b92a-4f2bc00dd1fd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.323338 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d67474c-95af-464f-b92a-4f2bc00dd1fd-kube-api-access-hprv6" (OuterVolumeSpecName: "kube-api-access-hprv6") pod "0d67474c-95af-464f-b92a-4f2bc00dd1fd" (UID: "0d67474c-95af-464f-b92a-4f2bc00dd1fd"). InnerVolumeSpecName "kube-api-access-hprv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.334052 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d67474c-95af-464f-b92a-4f2bc00dd1fd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0d67474c-95af-464f-b92a-4f2bc00dd1fd" (UID: "0d67474c-95af-464f-b92a-4f2bc00dd1fd"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.410412 4835 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d67474c-95af-464f-b92a-4f2bc00dd1fd-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.410460 4835 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0d67474c-95af-464f-b92a-4f2bc00dd1fd-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.410471 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hprv6\" (UniqueName: \"kubernetes.io/projected/0d67474c-95af-464f-b92a-4f2bc00dd1fd-kube-api-access-hprv6\") on node \"crc\" DevicePath \"\"" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.795084 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"020181c9-f118-4579-b4f8-6755c4f74d81","Type":"ContainerStarted","Data":"a2cf4234009955a63695697fb55c33621337927d19d160cdc54feacd11fe8407"} Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.813852 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.816483 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788" event={"ID":"0d67474c-95af-464f-b92a-4f2bc00dd1fd","Type":"ContainerDied","Data":"09e761f66cb5798c0fc433263c3aa401c43d5e36ca7e7dfadf3ee73cfb193ea8"} Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.816529 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09e761f66cb5798c0fc433263c3aa401c43d5e36ca7e7dfadf3ee73cfb193ea8" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.819346 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.8193319199999998 podStartE2EDuration="2.81933192s" podCreationTimestamp="2026-02-02 16:52:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:52:45.816435005 +0000 UTC m=+157.438039085" watchObservedRunningTime="2026-02-02 16:52:45.81933192 +0000 UTC m=+157.440936000" Feb 02 16:52:45 crc kubenswrapper[4835]: I0202 16:52:45.872315 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-9rdhl" Feb 02 16:52:46 crc kubenswrapper[4835]: I0202 16:52:46.024467 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:46 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:46 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:46 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:46 crc kubenswrapper[4835]: I0202 16:52:46.024530 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" 
Feb 02 16:52:46 crc kubenswrapper[4835]: I0202 16:52:46.824621 4835 generic.go:334] "Generic (PLEG): container finished" podID="020181c9-f118-4579-b4f8-6755c4f74d81" containerID="a2cf4234009955a63695697fb55c33621337927d19d160cdc54feacd11fe8407" exitCode=0 Feb 02 16:52:46 crc kubenswrapper[4835]: I0202 16:52:46.824789 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"020181c9-f118-4579-b4f8-6755c4f74d81","Type":"ContainerDied","Data":"a2cf4234009955a63695697fb55c33621337927d19d160cdc54feacd11fe8407"} Feb 02 16:52:47 crc kubenswrapper[4835]: I0202 16:52:47.024568 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:47 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:47 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:47 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:47 crc kubenswrapper[4835]: I0202 16:52:47.024630 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:48 crc kubenswrapper[4835]: I0202 16:52:48.024675 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:48 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:48 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:48 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:48 crc kubenswrapper[4835]: I0202 16:52:48.025644 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:49 crc kubenswrapper[4835]: I0202 16:52:49.024437 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:49 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:49 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:49 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:49 crc kubenswrapper[4835]: I0202 16:52:49.024518 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:50 crc kubenswrapper[4835]: I0202 16:52:50.026746 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:50 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:50 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:50 crc 
kubenswrapper[4835]: healthz check failed Feb 02 16:52:50 crc kubenswrapper[4835]: I0202 16:52:50.026792 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:50 crc kubenswrapper[4835]: I0202 16:52:50.583930 4835 patch_prober.go:28] interesting pod/console-f9d7485db-xpn8c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 02 16:52:50 crc kubenswrapper[4835]: I0202 16:52:50.583983 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xpn8c" podUID="aad2f2e8-6800-4238-a0ab-ee3304bad4c1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 02 16:52:50 crc kubenswrapper[4835]: I0202 16:52:50.672249 4835 patch_prober.go:28] interesting pod/downloads-7954f5f757-tqhv4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 02 16:52:50 crc kubenswrapper[4835]: I0202 16:52:50.672854 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tqhv4" podUID="2670e3af-3faf-4aa2-8674-ad7b94955ef0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 02 16:52:50 crc kubenswrapper[4835]: I0202 16:52:50.672587 4835 patch_prober.go:28] interesting pod/downloads-7954f5f757-tqhv4 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 02 16:52:50 crc kubenswrapper[4835]: I0202 16:52:50.673114 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tqhv4" podUID="2670e3af-3faf-4aa2-8674-ad7b94955ef0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 02 16:52:51 crc kubenswrapper[4835]: I0202 16:52:51.025132 4835 patch_prober.go:28] interesting pod/router-default-5444994796-bbqwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 16:52:51 crc kubenswrapper[4835]: [-]has-synced failed: reason withheld Feb 02 16:52:51 crc kubenswrapper[4835]: [+]process-running ok Feb 02 16:52:51 crc kubenswrapper[4835]: healthz check failed Feb 02 16:52:51 crc kubenswrapper[4835]: I0202 16:52:51.025197 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bbqwj" podUID="75c4b353-ddb5-4709-ab21-94059ac83671" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 16:52:51 crc kubenswrapper[4835]: I0202 16:52:51.708856 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod 
\"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:51 crc kubenswrapper[4835]: I0202 16:52:51.714451 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5f2e42e3-ff22-4273-9a65-d7e55792155e-metrics-certs\") pod \"network-metrics-daemon-fbl8t\" (UID: \"5f2e42e3-ff22-4273-9a65-d7e55792155e\") " pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.008914 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fbl8t" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.028349 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.030675 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-bbqwj" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.733836 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.822804 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/020181c9-f118-4579-b4f8-6755c4f74d81-kube-api-access\") pod \"020181c9-f118-4579-b4f8-6755c4f74d81\" (UID: \"020181c9-f118-4579-b4f8-6755c4f74d81\") " Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.822875 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/020181c9-f118-4579-b4f8-6755c4f74d81-kubelet-dir\") pod \"020181c9-f118-4579-b4f8-6755c4f74d81\" (UID: \"020181c9-f118-4579-b4f8-6755c4f74d81\") " Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.823102 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/020181c9-f118-4579-b4f8-6755c4f74d81-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "020181c9-f118-4579-b4f8-6755c4f74d81" (UID: "020181c9-f118-4579-b4f8-6755c4f74d81"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.842411 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/020181c9-f118-4579-b4f8-6755c4f74d81-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "020181c9-f118-4579-b4f8-6755c4f74d81" (UID: "020181c9-f118-4579-b4f8-6755c4f74d81"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.877597 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"020181c9-f118-4579-b4f8-6755c4f74d81","Type":"ContainerDied","Data":"ab79860b3cdb633efb5a8b986c5580c96bedde1d83cdb2b679d15b448b89821d"} Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.877645 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab79860b3cdb633efb5a8b986c5580c96bedde1d83cdb2b679d15b448b89821d" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.877655 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.924373 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/020181c9-f118-4579-b4f8-6755c4f74d81-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 16:52:52 crc kubenswrapper[4835]: I0202 16:52:52.924410 4835 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/020181c9-f118-4579-b4f8-6755c4f74d81-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:52:58 crc kubenswrapper[4835]: I0202 16:52:58.393503 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-87z74"] Feb 02 16:52:58 crc kubenswrapper[4835]: I0202 16:52:58.393738 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerName="controller-manager" containerID="cri-o://c46e4fbf2fef9e66eea766dcebc625006248cf6c3424ecebbf3f50aad1b9f40e" gracePeriod=30 Feb 02 16:52:58 crc kubenswrapper[4835]: I0202 16:52:58.417316 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4"] Feb 02 16:52:58 crc kubenswrapper[4835]: I0202 16:52:58.417606 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerName="route-controller-manager" containerID="cri-o://5aa6bb928922b9f0ad1ad1291055816bd9a9238803853e50a2d3a00ffd4b0dc1" gracePeriod=30 Feb 02 16:52:58 crc kubenswrapper[4835]: I0202 16:52:58.912260 4835 generic.go:334] "Generic (PLEG): container finished" podID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerID="5aa6bb928922b9f0ad1ad1291055816bd9a9238803853e50a2d3a00ffd4b0dc1" exitCode=0 Feb 02 16:52:58 crc kubenswrapper[4835]: I0202 16:52:58.912348 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" event={"ID":"5d31701b-ac62-4bdc-91d5-d9f411f6cf23","Type":"ContainerDied","Data":"5aa6bb928922b9f0ad1ad1291055816bd9a9238803853e50a2d3a00ffd4b0dc1"} Feb 02 16:52:58 crc kubenswrapper[4835]: I0202 16:52:58.914129 4835 generic.go:334] "Generic (PLEG): container finished" podID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerID="c46e4fbf2fef9e66eea766dcebc625006248cf6c3424ecebbf3f50aad1b9f40e" exitCode=0 Feb 02 16:52:58 crc kubenswrapper[4835]: I0202 16:52:58.914162 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" event={"ID":"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c","Type":"ContainerDied","Data":"c46e4fbf2fef9e66eea766dcebc625006248cf6c3424ecebbf3f50aad1b9f40e"} Feb 02 16:52:59 crc kubenswrapper[4835]: I0202 16:52:59.560735 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:53:00 crc kubenswrapper[4835]: I0202 16:53:00.073835 4835 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-87z74 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" 
start-of-body= Feb 02 16:53:00 crc kubenswrapper[4835]: I0202 16:53:00.073906 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Feb 02 16:53:00 crc kubenswrapper[4835]: I0202 16:53:00.515912 4835 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-qqjn4 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Feb 02 16:53:00 crc kubenswrapper[4835]: I0202 16:53:00.515974 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Feb 02 16:53:00 crc kubenswrapper[4835]: I0202 16:53:00.587917 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:53:00 crc kubenswrapper[4835]: I0202 16:53:00.598805 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 16:53:00 crc kubenswrapper[4835]: I0202 16:53:00.678291 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-tqhv4" Feb 02 16:53:02 crc kubenswrapper[4835]: I0202 16:53:02.399185 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-fbl8t"] Feb 02 16:53:10 crc kubenswrapper[4835]: I0202 16:53:10.460308 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-zsx6k" Feb 02 16:53:10 crc kubenswrapper[4835]: E0202 16:53:10.732841 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 02 16:53:10 crc kubenswrapper[4835]: E0202 16:53:10.733012 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r76qs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-56ncc_openshift-marketplace(cd498040-6af5-4953-8b1c-ea3803ba1b2a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 16:53:10 crc kubenswrapper[4835]: E0202 16:53:10.734175 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-56ncc" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" Feb 02 16:53:10 crc kubenswrapper[4835]: E0202 16:53:10.866363 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 02 16:53:10 crc kubenswrapper[4835]: E0202 16:53:10.866507 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gms2p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-k2lvh_openshift-marketplace(bf17cfb8-28ee-49e0-b0b3-df8c08b3efed): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 16:53:10 crc kubenswrapper[4835]: E0202 16:53:10.867667 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-k2lvh" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" Feb 02 16:53:10 crc kubenswrapper[4835]: I0202 16:53:10.987258 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" event={"ID":"5f2e42e3-ff22-4273-9a65-d7e55792155e","Type":"ContainerStarted","Data":"39a5e6e59fe28ad0d3e66d9491749434b7ed209ef4eaf1a88b2f86123e3a2039"} Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.075361 4835 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-87z74 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: i/o timeout" start-of-body= Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.076972 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: i/o timeout" Feb 02 16:53:11 crc kubenswrapper[4835]: E0202 16:53:11.328769 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-k2lvh" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" Feb 02 16:53:11 crc kubenswrapper[4835]: E0202 16:53:11.329105 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-56ncc" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.426747 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.474129 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg"] Feb 02 16:53:11 crc kubenswrapper[4835]: E0202 16:53:11.475716 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerName="route-controller-manager" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.475751 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerName="route-controller-manager" Feb 02 16:53:11 crc kubenswrapper[4835]: E0202 16:53:11.475768 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="020181c9-f118-4579-b4f8-6755c4f74d81" containerName="pruner" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.475777 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="020181c9-f118-4579-b4f8-6755c4f74d81" containerName="pruner" Feb 02 16:53:11 crc kubenswrapper[4835]: E0202 16:53:11.475796 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d67474c-95af-464f-b92a-4f2bc00dd1fd" containerName="collect-profiles" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.475806 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d67474c-95af-464f-b92a-4f2bc00dd1fd" containerName="collect-profiles" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.475933 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="020181c9-f118-4579-b4f8-6755c4f74d81" containerName="pruner" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.475948 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d67474c-95af-464f-b92a-4f2bc00dd1fd" containerName="collect-profiles" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.475962 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerName="route-controller-manager" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.476551 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg"] Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.476666 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.483756 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.515722 4835 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-qqjn4 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.515769 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.582817 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jfvj\" (UniqueName: \"kubernetes.io/projected/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-kube-api-access-7jfvj\") pod \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.582892 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-serving-cert\") pod \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.582934 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-serving-cert\") pod \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.582959 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-client-ca\") pod \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.582980 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-client-ca\") pod \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.583002 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hcjb\" (UniqueName: \"kubernetes.io/projected/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-kube-api-access-2hcjb\") pod \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.583055 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-config\") pod \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.583076 4835 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-config\") pod \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\" (UID: \"5d31701b-ac62-4bdc-91d5-d9f411f6cf23\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.583105 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-proxy-ca-bundles\") pod \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\" (UID: \"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c\") " Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.583245 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-client-ca\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.583343 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh9fk\" (UniqueName: \"kubernetes.io/projected/dc821989-5bfa-4da2-be1a-d5cfbd46285f-kube-api-access-gh9fk\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.583382 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-config\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.583406 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc821989-5bfa-4da2-be1a-d5cfbd46285f-serving-cert\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.584712 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-config" (OuterVolumeSpecName: "config") pod "5d31701b-ac62-4bdc-91d5-d9f411f6cf23" (UID: "5d31701b-ac62-4bdc-91d5-d9f411f6cf23"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.585374 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-client-ca" (OuterVolumeSpecName: "client-ca") pod "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" (UID: "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.585774 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" (UID: "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.586125 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-config" (OuterVolumeSpecName: "config") pod "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" (UID: "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.587471 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-client-ca" (OuterVolumeSpecName: "client-ca") pod "5d31701b-ac62-4bdc-91d5-d9f411f6cf23" (UID: "5d31701b-ac62-4bdc-91d5-d9f411f6cf23"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.590924 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-kube-api-access-7jfvj" (OuterVolumeSpecName: "kube-api-access-7jfvj") pod "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" (UID: "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c"). InnerVolumeSpecName "kube-api-access-7jfvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.591890 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5d31701b-ac62-4bdc-91d5-d9f411f6cf23" (UID: "5d31701b-ac62-4bdc-91d5-d9f411f6cf23"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.592067 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-kube-api-access-2hcjb" (OuterVolumeSpecName: "kube-api-access-2hcjb") pod "5d31701b-ac62-4bdc-91d5-d9f411f6cf23" (UID: "5d31701b-ac62-4bdc-91d5-d9f411f6cf23"). InnerVolumeSpecName "kube-api-access-2hcjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.596372 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" (UID: "1ec2a02d-bc93-409e-9fa1-6498f5da8b1c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685000 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-client-ca\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685058 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh9fk\" (UniqueName: \"kubernetes.io/projected/dc821989-5bfa-4da2-be1a-d5cfbd46285f-kube-api-access-gh9fk\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685092 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-config\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685117 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc821989-5bfa-4da2-be1a-d5cfbd46285f-serving-cert\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685169 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685182 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685191 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685199 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hcjb\" (UniqueName: \"kubernetes.io/projected/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-kube-api-access-2hcjb\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685210 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685218 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d31701b-ac62-4bdc-91d5-d9f411f6cf23-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685226 4835 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685233 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jfvj\" (UniqueName: \"kubernetes.io/projected/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-kube-api-access-7jfvj\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685241 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.685997 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-client-ca\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.686863 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-config\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.690976 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc821989-5bfa-4da2-be1a-d5cfbd46285f-serving-cert\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.705514 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh9fk\" (UniqueName: \"kubernetes.io/projected/dc821989-5bfa-4da2-be1a-d5cfbd46285f-kube-api-access-gh9fk\") pod \"route-controller-manager-6996cb79cf-4kvwg\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.849641 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.994403 4835 generic.go:334] "Generic (PLEG): container finished" podID="a810d18d-1792-4733-9325-5960e398f425" containerID="d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132" exitCode=0 Feb 02 16:53:11 crc kubenswrapper[4835]: I0202 16:53:11.994520 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9gmt" event={"ID":"a810d18d-1792-4733-9325-5960e398f425","Type":"ContainerDied","Data":"d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.004611 4835 generic.go:334] "Generic (PLEG): container finished" podID="4b92b257-9045-493d-9c64-0e3660e8513a" containerID="bec2a76b8a9774c0badf633d0461214631e35effac3fd9eb5ccfb55598915b8c" exitCode=0 Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.004714 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w9l6m" event={"ID":"4b92b257-9045-493d-9c64-0e3660e8513a","Type":"ContainerDied","Data":"bec2a76b8a9774c0badf633d0461214631e35effac3fd9eb5ccfb55598915b8c"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.007584 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.007584 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4" event={"ID":"5d31701b-ac62-4bdc-91d5-d9f411f6cf23","Type":"ContainerDied","Data":"476c43b7d8627952d3b500f6daced3f7cb628f3f0e90e0c21a7e6a79bdc0ffd1"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.007836 4835 scope.go:117] "RemoveContainer" containerID="5aa6bb928922b9f0ad1ad1291055816bd9a9238803853e50a2d3a00ffd4b0dc1" Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.029264 4835 generic.go:334] "Generic (PLEG): container finished" podID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerID="9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7" exitCode=0 Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.029394 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9mlm" event={"ID":"b4ac95a5-3112-4034-ab53-40d9c5cf13ac","Type":"ContainerDied","Data":"9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.038400 4835 generic.go:334] "Generic (PLEG): container finished" podID="56962d65-7f11-44f4-b09c-73302933d1a8" containerID="b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474" exitCode=0 Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.038517 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bccz" event={"ID":"56962d65-7f11-44f4-b09c-73302933d1a8","Type":"ContainerDied","Data":"b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.040509 4835 generic.go:334] "Generic (PLEG): container finished" podID="29dff398-e620-4558-854e-3e9fb13f1b25" containerID="a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9" exitCode=0 Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.040547 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-5cbxv" event={"ID":"29dff398-e620-4558-854e-3e9fb13f1b25","Type":"ContainerDied","Data":"a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.042628 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" event={"ID":"1ec2a02d-bc93-409e-9fa1-6498f5da8b1c","Type":"ContainerDied","Data":"940e7a02adc0a26daf7292a2e7eb95fb8b282e3c976084f3d5483b3cbb0df26d"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.042660 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-87z74" Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.050347 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" event={"ID":"5f2e42e3-ff22-4273-9a65-d7e55792155e","Type":"ContainerStarted","Data":"18d22ad1345daefb335404d169cb6f66f25599e0a2fefddbb36bea6823f5b73a"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.053148 4835 generic.go:334] "Generic (PLEG): container finished" podID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerID="db1c0b746df017128cec2795af3413c1971203c8b583d9383b34228f4bcb6af6" exitCode=0 Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.053177 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jq6s4" event={"ID":"5eecd945-3eb3-4384-9836-c1a65b49063f","Type":"ContainerDied","Data":"db1c0b746df017128cec2795af3413c1971203c8b583d9383b34228f4bcb6af6"} Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.128313 4835 scope.go:117] "RemoveContainer" containerID="c46e4fbf2fef9e66eea766dcebc625006248cf6c3424ecebbf3f50aad1b9f40e" Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.152063 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4"] Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.156757 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qqjn4"] Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.171549 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-87z74"] Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.174717 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-87z74"] Feb 02 16:53:12 crc kubenswrapper[4835]: I0202 16:53:12.246526 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg"] Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.061310 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9gmt" event={"ID":"a810d18d-1792-4733-9325-5960e398f425","Type":"ContainerStarted","Data":"9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.063930 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w9l6m" event={"ID":"4b92b257-9045-493d-9c64-0e3660e8513a","Type":"ContainerStarted","Data":"9d7290b15650e0224e45602dc53526d231b8653eb2eaad04effb640d21eecf5d"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.068758 4835 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9mlm" event={"ID":"b4ac95a5-3112-4034-ab53-40d9c5cf13ac","Type":"ContainerStarted","Data":"2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.071036 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5cbxv" event={"ID":"29dff398-e620-4558-854e-3e9fb13f1b25","Type":"ContainerStarted","Data":"01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.072505 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" event={"ID":"dc821989-5bfa-4da2-be1a-d5cfbd46285f","Type":"ContainerStarted","Data":"ab1c6627a1686ed6d7f98cec674315744dc2b89010a9ff4a8dbde989cc748488"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.072549 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" event={"ID":"dc821989-5bfa-4da2-be1a-d5cfbd46285f","Type":"ContainerStarted","Data":"11a2bf76205391507ce09c04a5f33356b6d3ab0536ec52ac30578d66935f9f83"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.072708 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.074310 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fbl8t" event={"ID":"5f2e42e3-ff22-4273-9a65-d7e55792155e","Type":"ContainerStarted","Data":"f956e435d5e5efc69ae5031c09de55edd117da16597bf66836f4430e44ff42ca"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.076266 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jq6s4" event={"ID":"5eecd945-3eb3-4384-9836-c1a65b49063f","Type":"ContainerStarted","Data":"30811e6f9bcb61a9d17661fefb50cb5dd8bb4fcb3648f82c284c2712ab21d3f4"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.077797 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bccz" event={"ID":"56962d65-7f11-44f4-b09c-73302933d1a8","Type":"ContainerStarted","Data":"a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65"} Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.086495 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v9gmt" podStartSLOduration=3.359877206 podStartE2EDuration="35.086477789s" podCreationTimestamp="2026-02-02 16:52:38 +0000 UTC" firstStartedPulling="2026-02-02 16:52:40.659458027 +0000 UTC m=+152.281062107" lastFinishedPulling="2026-02-02 16:53:12.38605861 +0000 UTC m=+184.007662690" observedRunningTime="2026-02-02 16:53:13.085263153 +0000 UTC m=+184.706867233" watchObservedRunningTime="2026-02-02 16:53:13.086477789 +0000 UTC m=+184.708081869" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.109244 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5cbxv" podStartSLOduration=3.068252906 podStartE2EDuration="36.109214277s" podCreationTimestamp="2026-02-02 16:52:37 +0000 UTC" firstStartedPulling="2026-02-02 16:52:39.572422448 +0000 UTC m=+151.194026528" lastFinishedPulling="2026-02-02 16:53:12.613383819 +0000 UTC m=+184.234987899" 
observedRunningTime="2026-02-02 16:53:13.105475457 +0000 UTC m=+184.727079537" watchObservedRunningTime="2026-02-02 16:53:13.109214277 +0000 UTC m=+184.730818357" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.137641 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8bccz" podStartSLOduration=3.177491999 podStartE2EDuration="34.137620652s" podCreationTimestamp="2026-02-02 16:52:39 +0000 UTC" firstStartedPulling="2026-02-02 16:52:41.719970326 +0000 UTC m=+153.341574406" lastFinishedPulling="2026-02-02 16:53:12.680098979 +0000 UTC m=+184.301703059" observedRunningTime="2026-02-02 16:53:13.134111479 +0000 UTC m=+184.755715569" watchObservedRunningTime="2026-02-02 16:53:13.137620652 +0000 UTC m=+184.759224732" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.161147 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" podStartSLOduration=15.161131543 podStartE2EDuration="15.161131543s" podCreationTimestamp="2026-02-02 16:52:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:13.157495436 +0000 UTC m=+184.779099516" watchObservedRunningTime="2026-02-02 16:53:13.161131543 +0000 UTC m=+184.782735623" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.177191 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-fbl8t" podStartSLOduration=164.177177724 podStartE2EDuration="2m44.177177724s" podCreationTimestamp="2026-02-02 16:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:13.176210746 +0000 UTC m=+184.797814826" watchObservedRunningTime="2026-02-02 16:53:13.177177724 +0000 UTC m=+184.798781804" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.195782 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" path="/var/lib/kubelet/pods/1ec2a02d-bc93-409e-9fa1-6498f5da8b1c/volumes" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.196473 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d31701b-ac62-4bdc-91d5-d9f411f6cf23" path="/var/lib/kubelet/pods/5d31701b-ac62-4bdc-91d5-d9f411f6cf23/volumes" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.197218 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q9mlm" podStartSLOduration=3.249474942 podStartE2EDuration="36.197193742s" podCreationTimestamp="2026-02-02 16:52:37 +0000 UTC" firstStartedPulling="2026-02-02 16:52:39.585436381 +0000 UTC m=+151.207040461" lastFinishedPulling="2026-02-02 16:53:12.533155181 +0000 UTC m=+184.154759261" observedRunningTime="2026-02-02 16:53:13.19575338 +0000 UTC m=+184.817357460" watchObservedRunningTime="2026-02-02 16:53:13.197193742 +0000 UTC m=+184.818797822" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.225660 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w9l6m" podStartSLOduration=2.487952689 podStartE2EDuration="34.225624377s" podCreationTimestamp="2026-02-02 16:52:39 +0000 UTC" firstStartedPulling="2026-02-02 16:52:40.706818248 +0000 UTC m=+152.328422328" lastFinishedPulling="2026-02-02 16:53:12.444489936 
+0000 UTC m=+184.066094016" observedRunningTime="2026-02-02 16:53:13.221379793 +0000 UTC m=+184.842983883" watchObservedRunningTime="2026-02-02 16:53:13.225624377 +0000 UTC m=+184.847228457" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.250423 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jq6s4" podStartSLOduration=3.369948292 podStartE2EDuration="36.250403426s" podCreationTimestamp="2026-02-02 16:52:37 +0000 UTC" firstStartedPulling="2026-02-02 16:52:39.579317371 +0000 UTC m=+151.200921441" lastFinishedPulling="2026-02-02 16:53:12.459772495 +0000 UTC m=+184.081376575" observedRunningTime="2026-02-02 16:53:13.24816529 +0000 UTC m=+184.869769380" watchObservedRunningTime="2026-02-02 16:53:13.250403426 +0000 UTC m=+184.872007506" Feb 02 16:53:13 crc kubenswrapper[4835]: I0202 16:53:13.264575 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.407571 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-8cbc6d589-bxn9g"] Feb 02 16:53:14 crc kubenswrapper[4835]: E0202 16:53:14.408450 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerName="controller-manager" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.408560 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerName="controller-manager" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.408731 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ec2a02d-bc93-409e-9fa1-6498f5da8b1c" containerName="controller-manager" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.409241 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.411616 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.411929 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.411986 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.412063 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.413943 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.419568 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.423940 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.427602 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8cbc6d589-bxn9g"] Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.521585 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-proxy-ca-bundles\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.521661 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-client-ca\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.521796 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63c4c013-f3a6-4b87-b6ff-54b17a705e56-serving-cert\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.521919 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg576\" (UniqueName: \"kubernetes.io/projected/63c4c013-f3a6-4b87-b6ff-54b17a705e56-kube-api-access-rg576\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.521958 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-config\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.623150 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-proxy-ca-bundles\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.623225 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-client-ca\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.624207 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-client-ca\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.624414 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63c4c013-f3a6-4b87-b6ff-54b17a705e56-serving-cert\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.624481 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg576\" (UniqueName: \"kubernetes.io/projected/63c4c013-f3a6-4b87-b6ff-54b17a705e56-kube-api-access-rg576\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.624515 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-config\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.624880 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-proxy-ca-bundles\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.626352 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-config\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc 
kubenswrapper[4835]: I0202 16:53:14.628440 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63c4c013-f3a6-4b87-b6ff-54b17a705e56-serving-cert\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.646585 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg576\" (UniqueName: \"kubernetes.io/projected/63c4c013-f3a6-4b87-b6ff-54b17a705e56-kube-api-access-rg576\") pod \"controller-manager-8cbc6d589-bxn9g\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.726415 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.870668 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.870736 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 16:53:14 crc kubenswrapper[4835]: I0202 16:53:14.986490 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8cbc6d589-bxn9g"] Feb 02 16:53:15 crc kubenswrapper[4835]: I0202 16:53:15.091013 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" event={"ID":"63c4c013-f3a6-4b87-b6ff-54b17a705e56","Type":"ContainerStarted","Data":"387f782515f415a95863a6386e1de1dc1676df77c2126afabf7e0cd403d09c50"} Feb 02 16:53:16 crc kubenswrapper[4835]: I0202 16:53:16.097821 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" event={"ID":"63c4c013-f3a6-4b87-b6ff-54b17a705e56","Type":"ContainerStarted","Data":"a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e"} Feb 02 16:53:16 crc kubenswrapper[4835]: I0202 16:53:16.098192 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:16 crc kubenswrapper[4835]: I0202 16:53:16.109078 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:16 crc kubenswrapper[4835]: I0202 16:53:16.118343 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" podStartSLOduration=18.118265408 podStartE2EDuration="18.118265408s" podCreationTimestamp="2026-02-02 16:52:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:16.114675393 +0000 UTC 
m=+187.736279493" watchObservedRunningTime="2026-02-02 16:53:16.118265408 +0000 UTC m=+187.739869488" Feb 02 16:53:17 crc kubenswrapper[4835]: I0202 16:53:17.327490 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 16:53:17 crc kubenswrapper[4835]: I0202 16:53:17.811923 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:53:17 crc kubenswrapper[4835]: I0202 16:53:17.811994 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.238781 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.238828 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.317121 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8cbc6d589-bxn9g"] Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.321017 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.321101 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.337911 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.342929 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.376212 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.389788 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.425739 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg"] Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.426041 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" podUID="dc821989-5bfa-4da2-be1a-d5cfbd46285f" containerName="route-controller-manager" containerID="cri-o://ab1c6627a1686ed6d7f98cec674315744dc2b89010a9ff4a8dbde989cc748488" gracePeriod=30 Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.429324 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.429510 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:53:18 crc kubenswrapper[4835]: I0202 16:53:18.523573 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.124253 4835 generic.go:334] "Generic (PLEG): container finished" podID="dc821989-5bfa-4da2-be1a-d5cfbd46285f" containerID="ab1c6627a1686ed6d7f98cec674315744dc2b89010a9ff4a8dbde989cc748488" exitCode=0 Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.124370 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" event={"ID":"dc821989-5bfa-4da2-be1a-d5cfbd46285f","Type":"ContainerDied","Data":"ab1c6627a1686ed6d7f98cec674315744dc2b89010a9ff4a8dbde989cc748488"} Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.125027 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" podUID="63c4c013-f3a6-4b87-b6ff-54b17a705e56" containerName="controller-manager" containerID="cri-o://a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e" gracePeriod=30 Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.172166 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.173774 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.174185 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.355217 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.392537 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc821989-5bfa-4da2-be1a-d5cfbd46285f-serving-cert\") pod \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.392597 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh9fk\" (UniqueName: \"kubernetes.io/projected/dc821989-5bfa-4da2-be1a-d5cfbd46285f-kube-api-access-gh9fk\") pod \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.392625 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-client-ca\") pod \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.392729 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-config\") pod \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\" (UID: \"dc821989-5bfa-4da2-be1a-d5cfbd46285f\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.393687 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-client-ca" (OuterVolumeSpecName: "client-ca") pod "dc821989-5bfa-4da2-be1a-d5cfbd46285f" (UID: 
"dc821989-5bfa-4da2-be1a-d5cfbd46285f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.394028 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.394119 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-config" (OuterVolumeSpecName: "config") pod "dc821989-5bfa-4da2-be1a-d5cfbd46285f" (UID: "dc821989-5bfa-4da2-be1a-d5cfbd46285f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.406895 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc821989-5bfa-4da2-be1a-d5cfbd46285f-kube-api-access-gh9fk" (OuterVolumeSpecName: "kube-api-access-gh9fk") pod "dc821989-5bfa-4da2-be1a-d5cfbd46285f" (UID: "dc821989-5bfa-4da2-be1a-d5cfbd46285f"). InnerVolumeSpecName "kube-api-access-gh9fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.415957 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc821989-5bfa-4da2-be1a-d5cfbd46285f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "dc821989-5bfa-4da2-be1a-d5cfbd46285f" (UID: "dc821989-5bfa-4da2-be1a-d5cfbd46285f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.423760 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 16:53:19 crc kubenswrapper[4835]: E0202 16:53:19.424233 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc821989-5bfa-4da2-be1a-d5cfbd46285f" containerName="route-controller-manager" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.424249 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc821989-5bfa-4da2-be1a-d5cfbd46285f" containerName="route-controller-manager" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.424404 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc821989-5bfa-4da2-be1a-d5cfbd46285f" containerName="route-controller-manager" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.424940 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.444143 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.444333 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.453786 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.495602 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.495727 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.495782 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc821989-5bfa-4da2-be1a-d5cfbd46285f-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.495792 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc821989-5bfa-4da2-be1a-d5cfbd46285f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.495804 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh9fk\" (UniqueName: \"kubernetes.io/projected/dc821989-5bfa-4da2-be1a-d5cfbd46285f-kube-api-access-gh9fk\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.553753 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.596367 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63c4c013-f3a6-4b87-b6ff-54b17a705e56-serving-cert\") pod \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.596462 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg576\" (UniqueName: \"kubernetes.io/projected/63c4c013-f3a6-4b87-b6ff-54b17a705e56-kube-api-access-rg576\") pod \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.596515 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-proxy-ca-bundles\") pod \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.596567 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-config\") pod \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.596671 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-client-ca\") pod \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\" (UID: \"63c4c013-f3a6-4b87-b6ff-54b17a705e56\") " Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.596859 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.596960 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.597395 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.597392 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "63c4c013-f3a6-4b87-b6ff-54b17a705e56" (UID: "63c4c013-f3a6-4b87-b6ff-54b17a705e56"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.597473 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-client-ca" (OuterVolumeSpecName: "client-ca") pod "63c4c013-f3a6-4b87-b6ff-54b17a705e56" (UID: "63c4c013-f3a6-4b87-b6ff-54b17a705e56"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.598061 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-config" (OuterVolumeSpecName: "config") pod "63c4c013-f3a6-4b87-b6ff-54b17a705e56" (UID: "63c4c013-f3a6-4b87-b6ff-54b17a705e56"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.600949 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63c4c013-f3a6-4b87-b6ff-54b17a705e56-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "63c4c013-f3a6-4b87-b6ff-54b17a705e56" (UID: "63c4c013-f3a6-4b87-b6ff-54b17a705e56"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.603475 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63c4c013-f3a6-4b87-b6ff-54b17a705e56-kube-api-access-rg576" (OuterVolumeSpecName: "kube-api-access-rg576") pod "63c4c013-f3a6-4b87-b6ff-54b17a705e56" (UID: "63c4c013-f3a6-4b87-b6ff-54b17a705e56"). InnerVolumeSpecName "kube-api-access-rg576". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.621508 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.698667 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.698746 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63c4c013-f3a6-4b87-b6ff-54b17a705e56-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.698789 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg576\" (UniqueName: \"kubernetes.io/projected/63c4c013-f3a6-4b87-b6ff-54b17a705e56-kube-api-access-rg576\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.698808 4835 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.698830 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63c4c013-f3a6-4b87-b6ff-54b17a705e56-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 
16:53:19.767080 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.844487 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.844566 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:53:19 crc kubenswrapper[4835]: I0202 16:53:19.921980 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.027246 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q9mlm"] Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.134259 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" event={"ID":"dc821989-5bfa-4da2-be1a-d5cfbd46285f","Type":"ContainerDied","Data":"11a2bf76205391507ce09c04a5f33356b6d3ab0536ec52ac30578d66935f9f83"} Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.134470 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.137447 4835 generic.go:334] "Generic (PLEG): container finished" podID="63c4c013-f3a6-4b87-b6ff-54b17a705e56" containerID="a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e" exitCode=0 Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.137806 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" event={"ID":"63c4c013-f3a6-4b87-b6ff-54b17a705e56","Type":"ContainerDied","Data":"a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e"} Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.138095 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" event={"ID":"63c4c013-f3a6-4b87-b6ff-54b17a705e56","Type":"ContainerDied","Data":"387f782515f415a95863a6386e1de1dc1676df77c2126afabf7e0cd403d09c50"} Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.138967 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8cbc6d589-bxn9g" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.139425 4835 scope.go:117] "RemoveContainer" containerID="ab1c6627a1686ed6d7f98cec674315744dc2b89010a9ff4a8dbde989cc748488" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.157445 4835 scope.go:117] "RemoveContainer" containerID="a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.176183 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg"] Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.183242 4835 scope.go:117] "RemoveContainer" containerID="a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.183438 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:53:20 crc kubenswrapper[4835]: E0202 16:53:20.184908 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e\": container with ID starting with a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e not found: ID does not exist" containerID="a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.184949 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e"} err="failed to get container status \"a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e\": rpc error: code = NotFound desc = could not find container \"a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e\": container with ID starting with a89af6e5f6fc52f59967756462a4d31fa8330f7f314380a6a89770287f85776e not found: ID does not exist" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.191066 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6996cb79cf-4kvwg"] Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.192177 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.193200 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.193599 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8cbc6d589-bxn9g"] Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.203600 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-8cbc6d589-bxn9g"] Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.228588 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 16:53:20 crc kubenswrapper[4835]: W0202 16:53:20.239494 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod60f72013_46a0_450d_9cf5_311eeaf4f0b0.slice/crio-3e25337c628b5fdea3e0c16e4e37b00e8ecacd8026da6a2ae10d9ac6bead9183 WatchSource:0}: Error finding container 
3e25337c628b5fdea3e0c16e4e37b00e8ecacd8026da6a2ae10d9ac6bead9183: Status 404 returned error can't find the container with id 3e25337c628b5fdea3e0c16e4e37b00e8ecacd8026da6a2ae10d9ac6bead9183 Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.243000 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.431850 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc"] Feb 02 16:53:20 crc kubenswrapper[4835]: E0202 16:53:20.434180 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63c4c013-f3a6-4b87-b6ff-54b17a705e56" containerName="controller-manager" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.434215 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="63c4c013-f3a6-4b87-b6ff-54b17a705e56" containerName="controller-manager" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.434421 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="63c4c013-f3a6-4b87-b6ff-54b17a705e56" containerName="controller-manager" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.435210 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.438021 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.438400 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.438514 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.438781 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.439074 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.439630 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.444570 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc"] Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.511496 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzw2q\" (UniqueName: \"kubernetes.io/projected/afd1976e-b8b2-40a7-8281-c62ed81f7e39-kube-api-access-tzw2q\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.511564 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-client-ca\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: 
\"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.511596 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afd1976e-b8b2-40a7-8281-c62ed81f7e39-serving-cert\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.511638 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-config\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.613790 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-config\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.613896 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzw2q\" (UniqueName: \"kubernetes.io/projected/afd1976e-b8b2-40a7-8281-c62ed81f7e39-kube-api-access-tzw2q\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.613934 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-client-ca\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.613966 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afd1976e-b8b2-40a7-8281-c62ed81f7e39-serving-cert\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.616266 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-client-ca\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.617834 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-config\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " 
pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.620660 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afd1976e-b8b2-40a7-8281-c62ed81f7e39-serving-cert\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.624146 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v9gmt"] Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.641818 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzw2q\" (UniqueName: \"kubernetes.io/projected/afd1976e-b8b2-40a7-8281-c62ed81f7e39-kube-api-access-tzw2q\") pod \"route-controller-manager-74c9f58944-vcngc\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:20 crc kubenswrapper[4835]: I0202 16:53:20.766888 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.146351 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"60f72013-46a0-450d-9cf5-311eeaf4f0b0","Type":"ContainerStarted","Data":"0bd27734378371c6ff3521dff87fa90f5eb46080baa53ca7410d2fbe94737b4a"} Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.146744 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"60f72013-46a0-450d-9cf5-311eeaf4f0b0","Type":"ContainerStarted","Data":"3e25337c628b5fdea3e0c16e4e37b00e8ecacd8026da6a2ae10d9ac6bead9183"} Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.148368 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q9mlm" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerName="registry-server" containerID="cri-o://2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90" gracePeriod=2 Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.157012 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc"] Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.165425 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.165404831 podStartE2EDuration="2.165404831s" podCreationTimestamp="2026-02-02 16:53:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:21.160573109 +0000 UTC m=+192.782177199" watchObservedRunningTime="2026-02-02 16:53:21.165404831 +0000 UTC m=+192.787008911" Feb 02 16:53:21 crc kubenswrapper[4835]: W0202 16:53:21.171099 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafd1976e_b8b2_40a7_8281_c62ed81f7e39.slice/crio-08731bfaff330546e6ac192d55ee8906a51ab96fc91d28d1e91a80b8c62a178b WatchSource:0}: Error finding container 
08731bfaff330546e6ac192d55ee8906a51ab96fc91d28d1e91a80b8c62a178b: Status 404 returned error can't find the container with id 08731bfaff330546e6ac192d55ee8906a51ab96fc91d28d1e91a80b8c62a178b Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.198595 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63c4c013-f3a6-4b87-b6ff-54b17a705e56" path="/var/lib/kubelet/pods/63c4c013-f3a6-4b87-b6ff-54b17a705e56/volumes" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.199358 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc821989-5bfa-4da2-be1a-d5cfbd46285f" path="/var/lib/kubelet/pods/dc821989-5bfa-4da2-be1a-d5cfbd46285f/volumes" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.208090 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.421842 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66cb587848-zftz8"] Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.423122 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.426079 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.426794 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.426950 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.428238 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.428388 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.429343 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.434862 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.438952 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66cb587848-zftz8"] Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.496781 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.525955 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-proxy-ca-bundles\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.526012 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-config\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.526058 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-client-ca\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.526094 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f1039f1-a16f-4312-9e96-190abbc6f498-serving-cert\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.526111 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2snr\" (UniqueName: \"kubernetes.io/projected/4f1039f1-a16f-4312-9e96-190abbc6f498-kube-api-access-x2snr\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.626720 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-utilities\") pod \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.626810 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-478rx\" (UniqueName: \"kubernetes.io/projected/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-kube-api-access-478rx\") pod \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.626834 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-catalog-content\") pod \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\" (UID: \"b4ac95a5-3112-4034-ab53-40d9c5cf13ac\") " Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.627114 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-client-ca\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.627167 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f1039f1-a16f-4312-9e96-190abbc6f498-serving-cert\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.627189 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2snr\" (UniqueName: \"kubernetes.io/projected/4f1039f1-a16f-4312-9e96-190abbc6f498-kube-api-access-x2snr\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.627212 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-proxy-ca-bundles\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.627241 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-config\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.628940 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-config\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.630979 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-client-ca\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.631101 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-proxy-ca-bundles\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.631625 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-utilities" (OuterVolumeSpecName: "utilities") pod "b4ac95a5-3112-4034-ab53-40d9c5cf13ac" (UID: "b4ac95a5-3112-4034-ab53-40d9c5cf13ac"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.636567 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-kube-api-access-478rx" (OuterVolumeSpecName: "kube-api-access-478rx") pod "b4ac95a5-3112-4034-ab53-40d9c5cf13ac" (UID: "b4ac95a5-3112-4034-ab53-40d9c5cf13ac"). InnerVolumeSpecName "kube-api-access-478rx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.638190 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f1039f1-a16f-4312-9e96-190abbc6f498-serving-cert\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.649012 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2snr\" (UniqueName: \"kubernetes.io/projected/4f1039f1-a16f-4312-9e96-190abbc6f498-kube-api-access-x2snr\") pod \"controller-manager-66cb587848-zftz8\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.696474 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4ac95a5-3112-4034-ab53-40d9c5cf13ac" (UID: "b4ac95a5-3112-4034-ab53-40d9c5cf13ac"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.728489 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.728618 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-478rx\" (UniqueName: \"kubernetes.io/projected/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-kube-api-access-478rx\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.728682 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ac95a5-3112-4034-ab53-40d9c5cf13ac-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:21 crc kubenswrapper[4835]: I0202 16:53:21.747044 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.078375 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66cb587848-zftz8"] Feb 02 16:53:22 crc kubenswrapper[4835]: W0202 16:53:22.094747 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f1039f1_a16f_4312_9e96_190abbc6f498.slice/crio-951f63895ca42c122248308fbf45ece3de2d0b1adfd0d49e056a0ba4ddea3fcb WatchSource:0}: Error finding container 951f63895ca42c122248308fbf45ece3de2d0b1adfd0d49e056a0ba4ddea3fcb: Status 404 returned error can't find the container with id 951f63895ca42c122248308fbf45ece3de2d0b1adfd0d49e056a0ba4ddea3fcb Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.175424 4835 generic.go:334] "Generic (PLEG): container finished" podID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerID="2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90" exitCode=0 Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.175490 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9mlm" event={"ID":"b4ac95a5-3112-4034-ab53-40d9c5cf13ac","Type":"ContainerDied","Data":"2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90"} Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.175521 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9mlm" event={"ID":"b4ac95a5-3112-4034-ab53-40d9c5cf13ac","Type":"ContainerDied","Data":"848245bf5926a9c355ece925a068a8a54782782e4b85cb32c368857ccf7dcd17"} Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.175542 4835 scope.go:117] "RemoveContainer" containerID="2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.175660 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q9mlm" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.196072 4835 generic.go:334] "Generic (PLEG): container finished" podID="60f72013-46a0-450d-9cf5-311eeaf4f0b0" containerID="0bd27734378371c6ff3521dff87fa90f5eb46080baa53ca7410d2fbe94737b4a" exitCode=0 Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.196239 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"60f72013-46a0-450d-9cf5-311eeaf4f0b0","Type":"ContainerDied","Data":"0bd27734378371c6ff3521dff87fa90f5eb46080baa53ca7410d2fbe94737b4a"} Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.208287 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q9mlm"] Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.216953 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q9mlm"] Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.227774 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" event={"ID":"afd1976e-b8b2-40a7-8281-c62ed81f7e39","Type":"ContainerStarted","Data":"e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37"} Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.228060 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" event={"ID":"afd1976e-b8b2-40a7-8281-c62ed81f7e39","Type":"ContainerStarted","Data":"08731bfaff330546e6ac192d55ee8906a51ab96fc91d28d1e91a80b8c62a178b"} Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.229430 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.231711 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v9gmt" podUID="a810d18d-1792-4733-9325-5960e398f425" containerName="registry-server" containerID="cri-o://9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590" gracePeriod=2 Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.232059 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" event={"ID":"4f1039f1-a16f-4312-9e96-190abbc6f498","Type":"ContainerStarted","Data":"951f63895ca42c122248308fbf45ece3de2d0b1adfd0d49e056a0ba4ddea3fcb"} Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.246777 4835 scope.go:117] "RemoveContainer" containerID="9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.247090 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.318150 4835 scope.go:117] "RemoveContainer" containerID="a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.320332 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" podStartSLOduration=4.320320974 podStartE2EDuration="4.320320974s" podCreationTimestamp="2026-02-02 16:53:18 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:22.309863087 +0000 UTC m=+193.931467197" watchObservedRunningTime="2026-02-02 16:53:22.320320974 +0000 UTC m=+193.941925044" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.384260 4835 scope.go:117] "RemoveContainer" containerID="2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90" Feb 02 16:53:22 crc kubenswrapper[4835]: E0202 16:53:22.385062 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90\": container with ID starting with 2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90 not found: ID does not exist" containerID="2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.385103 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90"} err="failed to get container status \"2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90\": rpc error: code = NotFound desc = could not find container \"2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90\": container with ID starting with 2589791817ae0bce22818c20ccf638721dbde6a01405cc1e0aa4e803f65fcd90 not found: ID does not exist" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.385126 4835 scope.go:117] "RemoveContainer" containerID="9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7" Feb 02 16:53:22 crc kubenswrapper[4835]: E0202 16:53:22.385373 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7\": container with ID starting with 9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7 not found: ID does not exist" containerID="9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.385393 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7"} err="failed to get container status \"9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7\": rpc error: code = NotFound desc = could not find container \"9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7\": container with ID starting with 9379c05dd3c7e26372f2c7b1b7d2b9eee211d02eafec34696c4ca623d5efa2f7 not found: ID does not exist" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.385408 4835 scope.go:117] "RemoveContainer" containerID="a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5" Feb 02 16:53:22 crc kubenswrapper[4835]: E0202 16:53:22.385596 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5\": container with ID starting with a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5 not found: ID does not exist" containerID="a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.385617 4835 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5"} err="failed to get container status \"a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5\": rpc error: code = NotFound desc = could not find container \"a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5\": container with ID starting with a5595392ad94800fd4e2ff3d0f050e55c6c429a4b6d40946bed8e373b377a3c5 not found: ID does not exist" Feb 02 16:53:22 crc kubenswrapper[4835]: I0202 16:53:22.423111 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8bccz"] Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.153734 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.199674 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" path="/var/lib/kubelet/pods/b4ac95a5-3112-4034-ab53-40d9c5cf13ac/volumes" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.246360 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" event={"ID":"4f1039f1-a16f-4312-9e96-190abbc6f498","Type":"ContainerStarted","Data":"df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864"} Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.246888 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.251190 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-utilities\") pod \"a810d18d-1792-4733-9325-5960e398f425\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.251246 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-catalog-content\") pod \"a810d18d-1792-4733-9325-5960e398f425\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.251454 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8vcw\" (UniqueName: \"kubernetes.io/projected/a810d18d-1792-4733-9325-5960e398f425-kube-api-access-x8vcw\") pod \"a810d18d-1792-4733-9325-5960e398f425\" (UID: \"a810d18d-1792-4733-9325-5960e398f425\") " Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.253709 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.256703 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-utilities" (OuterVolumeSpecName: "utilities") pod "a810d18d-1792-4733-9325-5960e398f425" (UID: "a810d18d-1792-4733-9325-5960e398f425"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.259885 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a810d18d-1792-4733-9325-5960e398f425-kube-api-access-x8vcw" (OuterVolumeSpecName: "kube-api-access-x8vcw") pod "a810d18d-1792-4733-9325-5960e398f425" (UID: "a810d18d-1792-4733-9325-5960e398f425"). InnerVolumeSpecName "kube-api-access-x8vcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.262309 4835 generic.go:334] "Generic (PLEG): container finished" podID="a810d18d-1792-4733-9325-5960e398f425" containerID="9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590" exitCode=0 Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.262600 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8bccz" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" containerName="registry-server" containerID="cri-o://a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65" gracePeriod=2 Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.263284 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v9gmt" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.263604 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9gmt" event={"ID":"a810d18d-1792-4733-9325-5960e398f425","Type":"ContainerDied","Data":"9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590"} Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.263641 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9gmt" event={"ID":"a810d18d-1792-4733-9325-5960e398f425","Type":"ContainerDied","Data":"2f092efd10bbe80b3a035c0cc53be0f017e2c9a7ffcbdbb98d5bd8ea0d489779"} Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.263659 4835 scope.go:117] "RemoveContainer" containerID="9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.274375 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" podStartSLOduration=5.274346905 podStartE2EDuration="5.274346905s" podCreationTimestamp="2026-02-02 16:53:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:23.271480851 +0000 UTC m=+194.893084921" watchObservedRunningTime="2026-02-02 16:53:23.274346905 +0000 UTC m=+194.895950985" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.313068 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a810d18d-1792-4733-9325-5960e398f425" (UID: "a810d18d-1792-4733-9325-5960e398f425"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.313384 4835 scope.go:117] "RemoveContainer" containerID="d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.353526 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.353553 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a810d18d-1792-4733-9325-5960e398f425-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.353563 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8vcw\" (UniqueName: \"kubernetes.io/projected/a810d18d-1792-4733-9325-5960e398f425-kube-api-access-x8vcw\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.433613 4835 scope.go:117] "RemoveContainer" containerID="21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.458335 4835 scope.go:117] "RemoveContainer" containerID="9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590" Feb 02 16:53:23 crc kubenswrapper[4835]: E0202 16:53:23.458946 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590\": container with ID starting with 9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590 not found: ID does not exist" containerID="9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.459043 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590"} err="failed to get container status \"9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590\": rpc error: code = NotFound desc = could not find container \"9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590\": container with ID starting with 9aad5b6f2ff04f4ef23d2dcf68c72db346acebcd3a6f64f3261e9b50b7b1f590 not found: ID does not exist" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.459074 4835 scope.go:117] "RemoveContainer" containerID="d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132" Feb 02 16:53:23 crc kubenswrapper[4835]: E0202 16:53:23.459535 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132\": container with ID starting with d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132 not found: ID does not exist" containerID="d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.459597 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132"} err="failed to get container status \"d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132\": rpc error: code = NotFound desc = could not find container 
\"d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132\": container with ID starting with d9a72eb82fa9c9df9ef4b5e54078a5e546d048212554a8d86eb20731272ec132 not found: ID does not exist" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.459640 4835 scope.go:117] "RemoveContainer" containerID="21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47" Feb 02 16:53:23 crc kubenswrapper[4835]: E0202 16:53:23.460224 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47\": container with ID starting with 21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47 not found: ID does not exist" containerID="21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.460254 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47"} err="failed to get container status \"21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47\": rpc error: code = NotFound desc = could not find container \"21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47\": container with ID starting with 21972349088e52a971b97e7ed700dcfc54053b64d4ef4cfabdb38b57fd27bd47 not found: ID does not exist" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.609944 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.613685 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v9gmt"] Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.620858 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v9gmt"] Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.658081 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kubelet-dir\") pod \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\" (UID: \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\") " Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.658385 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kube-api-access\") pod \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\" (UID: \"60f72013-46a0-450d-9cf5-311eeaf4f0b0\") " Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.661345 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "60f72013-46a0-450d-9cf5-311eeaf4f0b0" (UID: "60f72013-46a0-450d-9cf5-311eeaf4f0b0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.664629 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "60f72013-46a0-450d-9cf5-311eeaf4f0b0" (UID: "60f72013-46a0-450d-9cf5-311eeaf4f0b0"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.760231 4835 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.760500 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60f72013-46a0-450d-9cf5-311eeaf4f0b0-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:23 crc kubenswrapper[4835]: I0202 16:53:23.935856 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.063086 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-utilities\") pod \"56962d65-7f11-44f4-b09c-73302933d1a8\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.063162 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-catalog-content\") pod \"56962d65-7f11-44f4-b09c-73302933d1a8\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.063239 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6c4pc\" (UniqueName: \"kubernetes.io/projected/56962d65-7f11-44f4-b09c-73302933d1a8-kube-api-access-6c4pc\") pod \"56962d65-7f11-44f4-b09c-73302933d1a8\" (UID: \"56962d65-7f11-44f4-b09c-73302933d1a8\") " Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.064294 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-utilities" (OuterVolumeSpecName: "utilities") pod "56962d65-7f11-44f4-b09c-73302933d1a8" (UID: "56962d65-7f11-44f4-b09c-73302933d1a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.071433 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56962d65-7f11-44f4-b09c-73302933d1a8-kube-api-access-6c4pc" (OuterVolumeSpecName: "kube-api-access-6c4pc") pod "56962d65-7f11-44f4-b09c-73302933d1a8" (UID: "56962d65-7f11-44f4-b09c-73302933d1a8"). InnerVolumeSpecName "kube-api-access-6c4pc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.100951 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56962d65-7f11-44f4-b09c-73302933d1a8" (UID: "56962d65-7f11-44f4-b09c-73302933d1a8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.164954 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.164993 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56962d65-7f11-44f4-b09c-73302933d1a8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.165007 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6c4pc\" (UniqueName: \"kubernetes.io/projected/56962d65-7f11-44f4-b09c-73302933d1a8-kube-api-access-6c4pc\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.222446 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.222870 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.222951 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.223013 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a810d18d-1792-4733-9325-5960e398f425" containerName="extract-content" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.223079 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a810d18d-1792-4733-9325-5960e398f425" containerName="extract-content" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.223136 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.223194 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.223249 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerName="extract-content" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.223332 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerName="extract-content" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.223394 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60f72013-46a0-450d-9cf5-311eeaf4f0b0" containerName="pruner" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.223455 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="60f72013-46a0-450d-9cf5-311eeaf4f0b0" containerName="pruner" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.223516 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" containerName="extract-utilities" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.223568 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" containerName="extract-utilities" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.223625 4835 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a810d18d-1792-4733-9325-5960e398f425" containerName="extract-utilities" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.223678 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a810d18d-1792-4733-9325-5960e398f425" containerName="extract-utilities" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.223732 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerName="extract-utilities" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.223790 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerName="extract-utilities" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.223857 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" containerName="extract-content" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.223932 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" containerName="extract-content" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.224012 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a810d18d-1792-4733-9325-5960e398f425" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.224156 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a810d18d-1792-4733-9325-5960e398f425" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.224392 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.224493 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4ac95a5-3112-4034-ab53-40d9c5cf13ac" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.224566 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a810d18d-1792-4733-9325-5960e398f425" containerName="registry-server" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.224632 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="60f72013-46a0-450d-9cf5-311eeaf4f0b0" containerName="pruner" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.225161 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.240433 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.280524 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.280675 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"60f72013-46a0-450d-9cf5-311eeaf4f0b0","Type":"ContainerDied","Data":"3e25337c628b5fdea3e0c16e4e37b00e8ecacd8026da6a2ae10d9ac6bead9183"} Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.280722 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e25337c628b5fdea3e0c16e4e37b00e8ecacd8026da6a2ae10d9ac6bead9183" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.284927 4835 generic.go:334] "Generic (PLEG): container finished" podID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerID="6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32" exitCode=0 Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.284984 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k2lvh" event={"ID":"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed","Type":"ContainerDied","Data":"6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32"} Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.289975 4835 generic.go:334] "Generic (PLEG): container finished" podID="56962d65-7f11-44f4-b09c-73302933d1a8" containerID="a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65" exitCode=0 Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.290025 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bccz" event={"ID":"56962d65-7f11-44f4-b09c-73302933d1a8","Type":"ContainerDied","Data":"a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65"} Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.290181 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8bccz" event={"ID":"56962d65-7f11-44f4-b09c-73302933d1a8","Type":"ContainerDied","Data":"ce005b54e492d76c159b4acbf855108cdb545ed9c0b62bac20f0fdce6975c5fa"} Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.290221 4835 scope.go:117] "RemoveContainer" containerID="a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.290067 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8bccz" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.317349 4835 scope.go:117] "RemoveContainer" containerID="b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.330767 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8bccz"] Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.333819 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8bccz"] Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.338489 4835 scope.go:117] "RemoveContainer" containerID="41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.359800 4835 scope.go:117] "RemoveContainer" containerID="a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.360221 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65\": container with ID starting with a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65 not found: ID does not exist" containerID="a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.360262 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65"} err="failed to get container status \"a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65\": rpc error: code = NotFound desc = could not find container \"a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65\": container with ID starting with a1ac1928269448583da710e53b291c1c2e75c82f0ecbb75d2e8a0a87b15e2b65 not found: ID does not exist" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.360355 4835 scope.go:117] "RemoveContainer" containerID="b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.361001 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474\": container with ID starting with b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474 not found: ID does not exist" containerID="b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.361048 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474"} err="failed to get container status \"b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474\": rpc error: code = NotFound desc = could not find container \"b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474\": container with ID starting with b076040a42fb11f3d2789b69e3bc695e34f3ccf6e0377cb5c36c5e86199cf474 not found: ID does not exist" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.361088 4835 scope.go:117] "RemoveContainer" containerID="41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1" Feb 02 16:53:24 crc kubenswrapper[4835]: E0202 16:53:24.361492 4835 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1\": container with ID starting with 41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1 not found: ID does not exist" containerID="41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.361519 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1"} err="failed to get container status \"41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1\": rpc error: code = NotFound desc = could not find container \"41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1\": container with ID starting with 41bfd59c9dc8c489df97bcc2d461123b939829775c55ace97f225efe733897d1 not found: ID does not exist" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.367379 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.379280 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-var-lock\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.379387 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kube-api-access\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.480423 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.480504 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-var-lock\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.480525 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kube-api-access\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.480914 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " 
pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.480945 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-var-lock\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.501891 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kube-api-access\") pod \"installer-9-crc\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.545125 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:53:24 crc kubenswrapper[4835]: I0202 16:53:24.972699 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 16:53:24 crc kubenswrapper[4835]: W0202 16:53:24.983187 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf1a659cd_7abb_467d_afc2_fbdea0b38c04.slice/crio-d71c3433bce5e1fcae4f1860b0aeba359668f7f79d2c1469f38d8d67373821d5 WatchSource:0}: Error finding container d71c3433bce5e1fcae4f1860b0aeba359668f7f79d2c1469f38d8d67373821d5: Status 404 returned error can't find the container with id d71c3433bce5e1fcae4f1860b0aeba359668f7f79d2c1469f38d8d67373821d5 Feb 02 16:53:25 crc kubenswrapper[4835]: I0202 16:53:25.197892 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56962d65-7f11-44f4-b09c-73302933d1a8" path="/var/lib/kubelet/pods/56962d65-7f11-44f4-b09c-73302933d1a8/volumes" Feb 02 16:53:25 crc kubenswrapper[4835]: I0202 16:53:25.199314 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a810d18d-1792-4733-9325-5960e398f425" path="/var/lib/kubelet/pods/a810d18d-1792-4733-9325-5960e398f425/volumes" Feb 02 16:53:25 crc kubenswrapper[4835]: I0202 16:53:25.297653 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f1a659cd-7abb-467d-afc2-fbdea0b38c04","Type":"ContainerStarted","Data":"4c498dc3aeea36877f8e70e935f1657b1071768a0d7323f1bb60fb9266cf0569"} Feb 02 16:53:25 crc kubenswrapper[4835]: I0202 16:53:25.297706 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f1a659cd-7abb-467d-afc2-fbdea0b38c04","Type":"ContainerStarted","Data":"d71c3433bce5e1fcae4f1860b0aeba359668f7f79d2c1469f38d8d67373821d5"} Feb 02 16:53:25 crc kubenswrapper[4835]: I0202 16:53:25.298970 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-56ncc" event={"ID":"cd498040-6af5-4953-8b1c-ea3803ba1b2a","Type":"ContainerStarted","Data":"4b69cd929ce47f91a5ca980d55ebc62a9fab2ee4729abac24226b2d0cac8dea3"} Feb 02 16:53:25 crc kubenswrapper[4835]: I0202 16:53:25.301771 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k2lvh" event={"ID":"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed","Type":"ContainerStarted","Data":"958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63"} Feb 02 16:53:25 crc kubenswrapper[4835]: I0202 16:53:25.342601 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-k2lvh" podStartSLOduration=2.362489974 podStartE2EDuration="44.342580192s" podCreationTimestamp="2026-02-02 16:52:41 +0000 UTC" firstStartedPulling="2026-02-02 16:52:42.73368406 +0000 UTC m=+154.355288140" lastFinishedPulling="2026-02-02 16:53:24.713774278 +0000 UTC m=+196.335378358" observedRunningTime="2026-02-02 16:53:25.341415148 +0000 UTC m=+196.963019228" watchObservedRunningTime="2026-02-02 16:53:25.342580192 +0000 UTC m=+196.964184292" Feb 02 16:53:26 crc kubenswrapper[4835]: I0202 16:53:26.307931 4835 generic.go:334] "Generic (PLEG): container finished" podID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerID="4b69cd929ce47f91a5ca980d55ebc62a9fab2ee4729abac24226b2d0cac8dea3" exitCode=0 Feb 02 16:53:26 crc kubenswrapper[4835]: I0202 16:53:26.307995 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-56ncc" event={"ID":"cd498040-6af5-4953-8b1c-ea3803ba1b2a","Type":"ContainerDied","Data":"4b69cd929ce47f91a5ca980d55ebc62a9fab2ee4729abac24226b2d0cac8dea3"} Feb 02 16:53:26 crc kubenswrapper[4835]: I0202 16:53:26.341004 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.3409827659999998 podStartE2EDuration="2.340982766s" podCreationTimestamp="2026-02-02 16:53:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:26.337196966 +0000 UTC m=+197.958801056" watchObservedRunningTime="2026-02-02 16:53:26.340982766 +0000 UTC m=+197.962586846" Feb 02 16:53:27 crc kubenswrapper[4835]: I0202 16:53:27.313957 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-56ncc" event={"ID":"cd498040-6af5-4953-8b1c-ea3803ba1b2a","Type":"ContainerStarted","Data":"89c7081530301f1bc3271517c27bf9405ca5efcc7560c52443e1c05181c0f153"} Feb 02 16:53:27 crc kubenswrapper[4835]: I0202 16:53:27.352437 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-56ncc" podStartSLOduration=3.387857007 podStartE2EDuration="47.352421314s" podCreationTimestamp="2026-02-02 16:52:40 +0000 UTC" firstStartedPulling="2026-02-02 16:52:42.734419512 +0000 UTC m=+154.356023592" lastFinishedPulling="2026-02-02 16:53:26.698983819 +0000 UTC m=+198.320587899" observedRunningTime="2026-02-02 16:53:27.350788087 +0000 UTC m=+198.972392167" watchObservedRunningTime="2026-02-02 16:53:27.352421314 +0000 UTC m=+198.974025394" Feb 02 16:53:31 crc kubenswrapper[4835]: I0202 16:53:31.207572 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:53:31 crc kubenswrapper[4835]: I0202 16:53:31.208125 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:53:31 crc kubenswrapper[4835]: I0202 16:53:31.662813 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:53:31 crc kubenswrapper[4835]: I0202 16:53:31.663602 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:53:31 crc kubenswrapper[4835]: I0202 16:53:31.704824 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:53:32 crc 
kubenswrapper[4835]: I0202 16:53:32.253020 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-56ncc" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="registry-server" probeResult="failure" output=< Feb 02 16:53:32 crc kubenswrapper[4835]: timeout: failed to connect service ":50051" within 1s Feb 02 16:53:32 crc kubenswrapper[4835]: > Feb 02 16:53:32 crc kubenswrapper[4835]: I0202 16:53:32.390696 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:53:33 crc kubenswrapper[4835]: I0202 16:53:33.025235 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k2lvh"] Feb 02 16:53:34 crc kubenswrapper[4835]: I0202 16:53:34.354597 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k2lvh" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerName="registry-server" containerID="cri-o://958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63" gracePeriod=2 Feb 02 16:53:34 crc kubenswrapper[4835]: I0202 16:53:34.872464 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:53:34 crc kubenswrapper[4835]: I0202 16:53:34.930741 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gms2p\" (UniqueName: \"kubernetes.io/projected/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-kube-api-access-gms2p\") pod \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " Feb 02 16:53:34 crc kubenswrapper[4835]: I0202 16:53:34.930899 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-utilities\") pod \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " Feb 02 16:53:34 crc kubenswrapper[4835]: I0202 16:53:34.930931 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-catalog-content\") pod \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\" (UID: \"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed\") " Feb 02 16:53:34 crc kubenswrapper[4835]: I0202 16:53:34.932033 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-utilities" (OuterVolumeSpecName: "utilities") pod "bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" (UID: "bf17cfb8-28ee-49e0-b0b3-df8c08b3efed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:53:34 crc kubenswrapper[4835]: I0202 16:53:34.947470 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-kube-api-access-gms2p" (OuterVolumeSpecName: "kube-api-access-gms2p") pod "bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" (UID: "bf17cfb8-28ee-49e0-b0b3-df8c08b3efed"). InnerVolumeSpecName "kube-api-access-gms2p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.032465 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.032515 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gms2p\" (UniqueName: \"kubernetes.io/projected/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-kube-api-access-gms2p\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.234894 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" (UID: "bf17cfb8-28ee-49e0-b0b3-df8c08b3efed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.235298 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.364716 4835 generic.go:334] "Generic (PLEG): container finished" podID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerID="958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63" exitCode=0 Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.364770 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k2lvh" event={"ID":"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed","Type":"ContainerDied","Data":"958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63"} Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.364816 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k2lvh" event={"ID":"bf17cfb8-28ee-49e0-b0b3-df8c08b3efed","Type":"ContainerDied","Data":"1d9cfa29b2a5447115419257f8efad811bb647aa2922c3dd64f5c13a90d3125d"} Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.364841 4835 scope.go:117] "RemoveContainer" containerID="958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.366263 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k2lvh" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.381764 4835 scope.go:117] "RemoveContainer" containerID="6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.398194 4835 scope.go:117] "RemoveContainer" containerID="a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.430302 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k2lvh"] Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.434934 4835 scope.go:117] "RemoveContainer" containerID="958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63" Feb 02 16:53:35 crc kubenswrapper[4835]: E0202 16:53:35.436451 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63\": container with ID starting with 958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63 not found: ID does not exist" containerID="958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.436504 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63"} err="failed to get container status \"958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63\": rpc error: code = NotFound desc = could not find container \"958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63\": container with ID starting with 958167edfb32b61d40d6b7a57b2b9a29d0a85ad1f80b92f6e11dfc02bcf67e63 not found: ID does not exist" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.436537 4835 scope.go:117] "RemoveContainer" containerID="6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.439543 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k2lvh"] Feb 02 16:53:35 crc kubenswrapper[4835]: E0202 16:53:35.441384 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32\": container with ID starting with 6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32 not found: ID does not exist" containerID="6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.441452 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32"} err="failed to get container status \"6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32\": rpc error: code = NotFound desc = could not find container \"6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32\": container with ID starting with 6be1e929caf8778ff72a1c4e8259008418c3b6c24857660a59c16090d3207b32 not found: ID does not exist" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.441486 4835 scope.go:117] "RemoveContainer" containerID="a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845" Feb 02 16:53:35 crc kubenswrapper[4835]: E0202 16:53:35.442065 4835 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845\": container with ID starting with a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845 not found: ID does not exist" containerID="a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845" Feb 02 16:53:35 crc kubenswrapper[4835]: I0202 16:53:35.442088 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845"} err="failed to get container status \"a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845\": rpc error: code = NotFound desc = could not find container \"a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845\": container with ID starting with a5c2162d2e6264c55e602bf1e8709907d8864c3da64121fe4284b8f491f05845 not found: ID does not exist" Feb 02 16:53:37 crc kubenswrapper[4835]: I0202 16:53:37.195391 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" path="/var/lib/kubelet/pods/bf17cfb8-28ee-49e0-b0b3-df8c08b3efed/volumes" Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.300970 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66cb587848-zftz8"] Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.301254 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" podUID="4f1039f1-a16f-4312-9e96-190abbc6f498" containerName="controller-manager" containerID="cri-o://df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864" gracePeriod=30 Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.316876 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc"] Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.317156 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" podUID="afd1976e-b8b2-40a7-8281-c62ed81f7e39" containerName="route-controller-manager" containerID="cri-o://e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37" gracePeriod=30 Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.854229 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.859884 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.911750 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzw2q\" (UniqueName: \"kubernetes.io/projected/afd1976e-b8b2-40a7-8281-c62ed81f7e39-kube-api-access-tzw2q\") pod \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.911811 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-config\") pod \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.911868 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-client-ca\") pod \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.911945 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afd1976e-b8b2-40a7-8281-c62ed81f7e39-serving-cert\") pod \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\" (UID: \"afd1976e-b8b2-40a7-8281-c62ed81f7e39\") " Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.912671 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-client-ca" (OuterVolumeSpecName: "client-ca") pod "afd1976e-b8b2-40a7-8281-c62ed81f7e39" (UID: "afd1976e-b8b2-40a7-8281-c62ed81f7e39"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.912729 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-config" (OuterVolumeSpecName: "config") pod "afd1976e-b8b2-40a7-8281-c62ed81f7e39" (UID: "afd1976e-b8b2-40a7-8281-c62ed81f7e39"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.917344 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afd1976e-b8b2-40a7-8281-c62ed81f7e39-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "afd1976e-b8b2-40a7-8281-c62ed81f7e39" (UID: "afd1976e-b8b2-40a7-8281-c62ed81f7e39"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:53:38 crc kubenswrapper[4835]: I0202 16:53:38.918382 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afd1976e-b8b2-40a7-8281-c62ed81f7e39-kube-api-access-tzw2q" (OuterVolumeSpecName: "kube-api-access-tzw2q") pod "afd1976e-b8b2-40a7-8281-c62ed81f7e39" (UID: "afd1976e-b8b2-40a7-8281-c62ed81f7e39"). InnerVolumeSpecName "kube-api-access-tzw2q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.015122 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-config\") pod \"4f1039f1-a16f-4312-9e96-190abbc6f498\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.016315 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-proxy-ca-bundles\") pod \"4f1039f1-a16f-4312-9e96-190abbc6f498\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.016781 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4f1039f1-a16f-4312-9e96-190abbc6f498" (UID: "4f1039f1-a16f-4312-9e96-190abbc6f498"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.016836 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2snr\" (UniqueName: \"kubernetes.io/projected/4f1039f1-a16f-4312-9e96-190abbc6f498-kube-api-access-x2snr\") pod \"4f1039f1-a16f-4312-9e96-190abbc6f498\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.016865 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-client-ca\") pod \"4f1039f1-a16f-4312-9e96-190abbc6f498\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.016889 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f1039f1-a16f-4312-9e96-190abbc6f498-serving-cert\") pod \"4f1039f1-a16f-4312-9e96-190abbc6f498\" (UID: \"4f1039f1-a16f-4312-9e96-190abbc6f498\") " Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.016968 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-config" (OuterVolumeSpecName: "config") pod "4f1039f1-a16f-4312-9e96-190abbc6f498" (UID: "4f1039f1-a16f-4312-9e96-190abbc6f498"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.017146 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-client-ca" (OuterVolumeSpecName: "client-ca") pod "4f1039f1-a16f-4312-9e96-190abbc6f498" (UID: "4f1039f1-a16f-4312-9e96-190abbc6f498"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.017480 4835 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.017505 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.017515 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzw2q\" (UniqueName: \"kubernetes.io/projected/afd1976e-b8b2-40a7-8281-c62ed81f7e39-kube-api-access-tzw2q\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.017530 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.017541 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/afd1976e-b8b2-40a7-8281-c62ed81f7e39-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.017552 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f1039f1-a16f-4312-9e96-190abbc6f498-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.017563 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afd1976e-b8b2-40a7-8281-c62ed81f7e39-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.019828 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f1039f1-a16f-4312-9e96-190abbc6f498-kube-api-access-x2snr" (OuterVolumeSpecName: "kube-api-access-x2snr") pod "4f1039f1-a16f-4312-9e96-190abbc6f498" (UID: "4f1039f1-a16f-4312-9e96-190abbc6f498"). InnerVolumeSpecName "kube-api-access-x2snr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.020672 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f1039f1-a16f-4312-9e96-190abbc6f498-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4f1039f1-a16f-4312-9e96-190abbc6f498" (UID: "4f1039f1-a16f-4312-9e96-190abbc6f498"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.118514 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2snr\" (UniqueName: \"kubernetes.io/projected/4f1039f1-a16f-4312-9e96-190abbc6f498-kube-api-access-x2snr\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.118555 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f1039f1-a16f-4312-9e96-190abbc6f498-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.384955 4835 generic.go:334] "Generic (PLEG): container finished" podID="afd1976e-b8b2-40a7-8281-c62ed81f7e39" containerID="e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37" exitCode=0 Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.385005 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" event={"ID":"afd1976e-b8b2-40a7-8281-c62ed81f7e39","Type":"ContainerDied","Data":"e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37"} Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.385036 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.385051 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc" event={"ID":"afd1976e-b8b2-40a7-8281-c62ed81f7e39","Type":"ContainerDied","Data":"08731bfaff330546e6ac192d55ee8906a51ab96fc91d28d1e91a80b8c62a178b"} Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.385080 4835 scope.go:117] "RemoveContainer" containerID="e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.387772 4835 generic.go:334] "Generic (PLEG): container finished" podID="4f1039f1-a16f-4312-9e96-190abbc6f498" containerID="df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864" exitCode=0 Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.387803 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" event={"ID":"4f1039f1-a16f-4312-9e96-190abbc6f498","Type":"ContainerDied","Data":"df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864"} Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.387826 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" event={"ID":"4f1039f1-a16f-4312-9e96-190abbc6f498","Type":"ContainerDied","Data":"951f63895ca42c122248308fbf45ece3de2d0b1adfd0d49e056a0ba4ddea3fcb"} Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.388392 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66cb587848-zftz8" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.412676 4835 scope.go:117] "RemoveContainer" containerID="e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37" Feb 02 16:53:39 crc kubenswrapper[4835]: E0202 16:53:39.413777 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37\": container with ID starting with e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37 not found: ID does not exist" containerID="e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.413837 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37"} err="failed to get container status \"e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37\": rpc error: code = NotFound desc = could not find container \"e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37\": container with ID starting with e0b9fd4a28a0ff56ee0140a0ae5d2730e0ad9d990a242314d459eee1edf74a37 not found: ID does not exist" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.413960 4835 scope.go:117] "RemoveContainer" containerID="df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.424975 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc"] Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.432684 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74c9f58944-vcngc"] Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.436666 4835 scope.go:117] "RemoveContainer" containerID="df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.436807 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66cb587848-zftz8"] Feb 02 16:53:39 crc kubenswrapper[4835]: E0202 16:53:39.437146 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864\": container with ID starting with df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864 not found: ID does not exist" containerID="df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.437181 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864"} err="failed to get container status \"df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864\": rpc error: code = NotFound desc = could not find container \"df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864\": container with ID starting with df8716b559cedc4bfc85df961e693a9c9f9b5f75acdea5f2d3b05db61caee864 not found: ID does not exist" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.440511 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-66cb587848-zftz8"] Feb 02 16:53:39 crc 
kubenswrapper[4835]: I0202 16:53:39.475138 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-79d8d88754-bdkrc"] Feb 02 16:53:39 crc kubenswrapper[4835]: E0202 16:53:39.475366 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f1039f1-a16f-4312-9e96-190abbc6f498" containerName="controller-manager" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.475380 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f1039f1-a16f-4312-9e96-190abbc6f498" containerName="controller-manager" Feb 02 16:53:39 crc kubenswrapper[4835]: E0202 16:53:39.475391 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerName="extract-content" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.475397 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerName="extract-content" Feb 02 16:53:39 crc kubenswrapper[4835]: E0202 16:53:39.475416 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerName="extract-utilities" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.475422 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerName="extract-utilities" Feb 02 16:53:39 crc kubenswrapper[4835]: E0202 16:53:39.475430 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afd1976e-b8b2-40a7-8281-c62ed81f7e39" containerName="route-controller-manager" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.475436 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="afd1976e-b8b2-40a7-8281-c62ed81f7e39" containerName="route-controller-manager" Feb 02 16:53:39 crc kubenswrapper[4835]: E0202 16:53:39.475446 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerName="registry-server" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.475452 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerName="registry-server" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.475557 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf17cfb8-28ee-49e0-b0b3-df8c08b3efed" containerName="registry-server" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.475570 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f1039f1-a16f-4312-9e96-190abbc6f498" containerName="controller-manager" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.475580 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="afd1976e-b8b2-40a7-8281-c62ed81f7e39" containerName="route-controller-manager" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.476000 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.477511 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6"] Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.482858 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.483565 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.483735 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.483878 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.483984 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.484236 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.485057 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.487328 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.487423 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.487574 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.487874 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.489885 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.490366 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.491083 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.491386 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6"] Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.494530 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79d8d88754-bdkrc"] Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.624770 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbg95\" (UniqueName: 
\"kubernetes.io/projected/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-kube-api-access-rbg95\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.625177 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-client-ca\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.625507 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-serving-cert\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.625803 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-proxy-ca-bundles\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.625993 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-client-ca\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.626245 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9dc7\" (UniqueName: \"kubernetes.io/projected/8c626adc-1337-4bf9-8b14-b8f63d070ffb-kube-api-access-g9dc7\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.626541 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-config\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.626806 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-config\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.627000 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/8c626adc-1337-4bf9-8b14-b8f63d070ffb-serving-cert\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728677 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbg95\" (UniqueName: \"kubernetes.io/projected/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-kube-api-access-rbg95\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728753 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-client-ca\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728798 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-serving-cert\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728839 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-proxy-ca-bundles\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728864 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-client-ca\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728893 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9dc7\" (UniqueName: \"kubernetes.io/projected/8c626adc-1337-4bf9-8b14-b8f63d070ffb-kube-api-access-g9dc7\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728919 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-config\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728959 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-config\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: 
\"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.728986 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c626adc-1337-4bf9-8b14-b8f63d070ffb-serving-cert\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.730387 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-client-ca\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.730796 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-config\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.730936 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-client-ca\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.731129 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-config\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.732718 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c626adc-1337-4bf9-8b14-b8f63d070ffb-serving-cert\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.733773 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-proxy-ca-bundles\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.736006 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-serving-cert\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.744098 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rbg95\" (UniqueName: \"kubernetes.io/projected/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-kube-api-access-rbg95\") pod \"route-controller-manager-5f6b66f84b-w2ks6\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.758439 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9dc7\" (UniqueName: \"kubernetes.io/projected/8c626adc-1337-4bf9-8b14-b8f63d070ffb-kube-api-access-g9dc7\") pod \"controller-manager-79d8d88754-bdkrc\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.799314 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.809914 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:39 crc kubenswrapper[4835]: I0202 16:53:39.987777 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79d8d88754-bdkrc"] Feb 02 16:53:39 crc kubenswrapper[4835]: W0202 16:53:39.995203 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c626adc_1337_4bf9_8b14_b8f63d070ffb.slice/crio-e3cfa73cfda71c753e5b859302c38ad08fa84f26ce1fb0ac0fae3869270a328d WatchSource:0}: Error finding container e3cfa73cfda71c753e5b859302c38ad08fa84f26ce1fb0ac0fae3869270a328d: Status 404 returned error can't find the container with id e3cfa73cfda71c753e5b859302c38ad08fa84f26ce1fb0ac0fae3869270a328d Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.074296 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6"] Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.394298 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" event={"ID":"8c626adc-1337-4bf9-8b14-b8f63d070ffb","Type":"ContainerStarted","Data":"38361dd9bda8300000bf10dd6d44ccd1c1315920cff41e00fe69abd0daf630f2"} Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.394386 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.394403 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" event={"ID":"8c626adc-1337-4bf9-8b14-b8f63d070ffb","Type":"ContainerStarted","Data":"e3cfa73cfda71c753e5b859302c38ad08fa84f26ce1fb0ac0fae3869270a328d"} Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.398188 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" event={"ID":"7d25a1ea-db07-4b75-8a8c-f559780e8ee1","Type":"ContainerStarted","Data":"c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc"} Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.398255 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" event={"ID":"7d25a1ea-db07-4b75-8a8c-f559780e8ee1","Type":"ContainerStarted","Data":"0edb71c17d83ea044dbbb97ff2c9b444f98e995c29f8838dd9017430f832da40"} Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.398414 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.403635 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.413634 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" podStartSLOduration=2.413615856 podStartE2EDuration="2.413615856s" podCreationTimestamp="2026-02-02 16:53:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:40.410480675 +0000 UTC m=+212.032084775" watchObservedRunningTime="2026-02-02 16:53:40.413615856 +0000 UTC m=+212.035219936" Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.460451 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" podStartSLOduration=2.460427158 podStartE2EDuration="2.460427158s" podCreationTimestamp="2026-02-02 16:53:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:53:40.45877433 +0000 UTC m=+212.080378410" watchObservedRunningTime="2026-02-02 16:53:40.460427158 +0000 UTC m=+212.082031238" Feb 02 16:53:40 crc kubenswrapper[4835]: I0202 16:53:40.575112 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:41 crc kubenswrapper[4835]: I0202 16:53:41.206375 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f1039f1-a16f-4312-9e96-190abbc6f498" path="/var/lib/kubelet/pods/4f1039f1-a16f-4312-9e96-190abbc6f498/volumes" Feb 02 16:53:41 crc kubenswrapper[4835]: I0202 16:53:41.207956 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afd1976e-b8b2-40a7-8281-c62ed81f7e39" path="/var/lib/kubelet/pods/afd1976e-b8b2-40a7-8281-c62ed81f7e39/volumes" Feb 02 16:53:41 crc kubenswrapper[4835]: I0202 16:53:41.271302 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:53:41 crc kubenswrapper[4835]: I0202 16:53:41.336145 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:53:44 crc kubenswrapper[4835]: I0202 16:53:44.870955 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 16:53:44 crc kubenswrapper[4835]: I0202 16:53:44.871483 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 16:53:44 crc kubenswrapper[4835]: I0202 16:53:44.871562 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:53:44 crc kubenswrapper[4835]: I0202 16:53:44.872612 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 16:53:44 crc kubenswrapper[4835]: I0202 16:53:44.872703 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8" gracePeriod=600 Feb 02 16:53:45 crc kubenswrapper[4835]: I0202 16:53:45.441092 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8" exitCode=0 Feb 02 16:53:45 crc kubenswrapper[4835]: I0202 16:53:45.441157 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8"} Feb 02 16:53:45 crc kubenswrapper[4835]: I0202 16:53:45.441541 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"b71282d2471b88b91c03e4f9e85d7d1903f9682be4501b8d87dfc0ade7c2e31e"} Feb 02 16:53:49 crc kubenswrapper[4835]: I0202 16:53:49.545612 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-2n9fx"] Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.322976 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-79d8d88754-bdkrc"] Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.325427 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" podUID="8c626adc-1337-4bf9-8b14-b8f63d070ffb" containerName="controller-manager" containerID="cri-o://38361dd9bda8300000bf10dd6d44ccd1c1315920cff41e00fe69abd0daf630f2" gracePeriod=30 Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.421219 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6"] Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.421452 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" podUID="7d25a1ea-db07-4b75-8a8c-f559780e8ee1" containerName="route-controller-manager" containerID="cri-o://c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc" gracePeriod=30 Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 
16:53:58.539618 4835 generic.go:334] "Generic (PLEG): container finished" podID="8c626adc-1337-4bf9-8b14-b8f63d070ffb" containerID="38361dd9bda8300000bf10dd6d44ccd1c1315920cff41e00fe69abd0daf630f2" exitCode=0 Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.539664 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" event={"ID":"8c626adc-1337-4bf9-8b14-b8f63d070ffb","Type":"ContainerDied","Data":"38361dd9bda8300000bf10dd6d44ccd1c1315920cff41e00fe69abd0daf630f2"} Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.869691 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.875473 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988230 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9dc7\" (UniqueName: \"kubernetes.io/projected/8c626adc-1337-4bf9-8b14-b8f63d070ffb-kube-api-access-g9dc7\") pod \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988326 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbg95\" (UniqueName: \"kubernetes.io/projected/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-kube-api-access-rbg95\") pod \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988369 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c626adc-1337-4bf9-8b14-b8f63d070ffb-serving-cert\") pod \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988410 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-client-ca\") pod \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988434 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-serving-cert\") pod \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988458 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-proxy-ca-bundles\") pod \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988479 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-config\") pod \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\" (UID: \"7d25a1ea-db07-4b75-8a8c-f559780e8ee1\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988523 4835 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-client-ca\") pod \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.988575 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-config\") pod \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\" (UID: \"8c626adc-1337-4bf9-8b14-b8f63d070ffb\") " Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.989622 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8c626adc-1337-4bf9-8b14-b8f63d070ffb" (UID: "8c626adc-1337-4bf9-8b14-b8f63d070ffb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.989666 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-client-ca" (OuterVolumeSpecName: "client-ca") pod "8c626adc-1337-4bf9-8b14-b8f63d070ffb" (UID: "8c626adc-1337-4bf9-8b14-b8f63d070ffb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.989902 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-config" (OuterVolumeSpecName: "config") pod "8c626adc-1337-4bf9-8b14-b8f63d070ffb" (UID: "8c626adc-1337-4bf9-8b14-b8f63d070ffb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.990928 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-client-ca" (OuterVolumeSpecName: "client-ca") pod "7d25a1ea-db07-4b75-8a8c-f559780e8ee1" (UID: "7d25a1ea-db07-4b75-8a8c-f559780e8ee1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.991086 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-config" (OuterVolumeSpecName: "config") pod "7d25a1ea-db07-4b75-8a8c-f559780e8ee1" (UID: "7d25a1ea-db07-4b75-8a8c-f559780e8ee1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.994184 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7d25a1ea-db07-4b75-8a8c-f559780e8ee1" (UID: "7d25a1ea-db07-4b75-8a8c-f559780e8ee1"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.994219 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-kube-api-access-rbg95" (OuterVolumeSpecName: "kube-api-access-rbg95") pod "7d25a1ea-db07-4b75-8a8c-f559780e8ee1" (UID: "7d25a1ea-db07-4b75-8a8c-f559780e8ee1"). 
InnerVolumeSpecName "kube-api-access-rbg95". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.995461 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c626adc-1337-4bf9-8b14-b8f63d070ffb-kube-api-access-g9dc7" (OuterVolumeSpecName: "kube-api-access-g9dc7") pod "8c626adc-1337-4bf9-8b14-b8f63d070ffb" (UID: "8c626adc-1337-4bf9-8b14-b8f63d070ffb"). InnerVolumeSpecName "kube-api-access-g9dc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:53:58 crc kubenswrapper[4835]: I0202 16:53:58.996065 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c626adc-1337-4bf9-8b14-b8f63d070ffb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8c626adc-1337-4bf9-8b14-b8f63d070ffb" (UID: "8c626adc-1337-4bf9-8b14-b8f63d070ffb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089729 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9dc7\" (UniqueName: \"kubernetes.io/projected/8c626adc-1337-4bf9-8b14-b8f63d070ffb-kube-api-access-g9dc7\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089768 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbg95\" (UniqueName: \"kubernetes.io/projected/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-kube-api-access-rbg95\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089782 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c626adc-1337-4bf9-8b14-b8f63d070ffb-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089793 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089803 4835 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089814 4835 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089826 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d25a1ea-db07-4b75-8a8c-f559780e8ee1-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089840 4835 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.089853 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c626adc-1337-4bf9-8b14-b8f63d070ffb-config\") on node \"crc\" DevicePath \"\"" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.492014 4835 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg"] Feb 02 16:53:59 crc kubenswrapper[4835]: E0202 16:53:59.492590 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c626adc-1337-4bf9-8b14-b8f63d070ffb" containerName="controller-manager" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.492615 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c626adc-1337-4bf9-8b14-b8f63d070ffb" containerName="controller-manager" Feb 02 16:53:59 crc kubenswrapper[4835]: E0202 16:53:59.492640 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d25a1ea-db07-4b75-8a8c-f559780e8ee1" containerName="route-controller-manager" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.492652 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d25a1ea-db07-4b75-8a8c-f559780e8ee1" containerName="route-controller-manager" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.492858 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c626adc-1337-4bf9-8b14-b8f63d070ffb" containerName="controller-manager" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.493037 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d25a1ea-db07-4b75-8a8c-f559780e8ee1" containerName="route-controller-manager" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.497558 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.498153 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-57c6bd8655-s8csf"] Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.498851 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.510145 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg"] Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.525836 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-57c6bd8655-s8csf"] Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.545392 4835 generic.go:334] "Generic (PLEG): container finished" podID="7d25a1ea-db07-4b75-8a8c-f559780e8ee1" containerID="c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc" exitCode=0 Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.545444 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.545450 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" event={"ID":"7d25a1ea-db07-4b75-8a8c-f559780e8ee1","Type":"ContainerDied","Data":"c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc"} Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.545813 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6" event={"ID":"7d25a1ea-db07-4b75-8a8c-f559780e8ee1","Type":"ContainerDied","Data":"0edb71c17d83ea044dbbb97ff2c9b444f98e995c29f8838dd9017430f832da40"} Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.545838 4835 scope.go:117] "RemoveContainer" containerID="c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.547012 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" event={"ID":"8c626adc-1337-4bf9-8b14-b8f63d070ffb","Type":"ContainerDied","Data":"e3cfa73cfda71c753e5b859302c38ad08fa84f26ce1fb0ac0fae3869270a328d"} Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.547086 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8d88754-bdkrc" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.568111 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-79d8d88754-bdkrc"] Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.576681 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-79d8d88754-bdkrc"] Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.577569 4835 scope.go:117] "RemoveContainer" containerID="c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc" Feb 02 16:53:59 crc kubenswrapper[4835]: E0202 16:53:59.578129 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc\": container with ID starting with c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc not found: ID does not exist" containerID="c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.578215 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc"} err="failed to get container status \"c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc\": rpc error: code = NotFound desc = could not find container \"c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc\": container with ID starting with c818358dd255f9272ca8ddde97034c6844982065d12d2c327352f7a50c8f29dc not found: ID does not exist" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.578402 4835 scope.go:117] "RemoveContainer" containerID="38361dd9bda8300000bf10dd6d44ccd1c1315920cff41e00fe69abd0daf630f2" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.581234 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6"] Feb 02 16:53:59 
crc kubenswrapper[4835]: I0202 16:53:59.585418 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f6b66f84b-w2ks6"] Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.596910 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a3b21bf-88b4-4df6-9449-a2833685222a-serving-cert\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.596942 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-client-ca\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.596963 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxn7f\" (UniqueName: \"kubernetes.io/projected/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-kube-api-access-zxn7f\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.596989 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a3b21bf-88b4-4df6-9449-a2833685222a-client-ca\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.597006 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-config\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.597031 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a3b21bf-88b4-4df6-9449-a2833685222a-config\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.597050 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxv5z\" (UniqueName: \"kubernetes.io/projected/5a3b21bf-88b4-4df6-9449-a2833685222a-kube-api-access-kxv5z\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.597065 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-proxy-ca-bundles\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.597092 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-serving-cert\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.698690 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a3b21bf-88b4-4df6-9449-a2833685222a-config\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.698776 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxv5z\" (UniqueName: \"kubernetes.io/projected/5a3b21bf-88b4-4df6-9449-a2833685222a-kube-api-access-kxv5z\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.698814 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-proxy-ca-bundles\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.698877 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-serving-cert\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.698984 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a3b21bf-88b4-4df6-9449-a2833685222a-serving-cert\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.699025 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-client-ca\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.699053 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxn7f\" (UniqueName: \"kubernetes.io/projected/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-kube-api-access-zxn7f\") pod 
\"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.699087 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a3b21bf-88b4-4df6-9449-a2833685222a-client-ca\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.699120 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-config\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.700942 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-config\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.702373 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a3b21bf-88b4-4df6-9449-a2833685222a-config\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.703997 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-client-ca\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.704246 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a3b21bf-88b4-4df6-9449-a2833685222a-client-ca\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.704430 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-proxy-ca-bundles\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.708978 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-serving-cert\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.708978 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a3b21bf-88b4-4df6-9449-a2833685222a-serving-cert\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.726294 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxn7f\" (UniqueName: \"kubernetes.io/projected/aa61f9d5-ef2d-4ad7-a022-17d453b03d1c-kube-api-access-zxn7f\") pod \"controller-manager-57c6bd8655-s8csf\" (UID: \"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c\") " pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.726344 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxv5z\" (UniqueName: \"kubernetes.io/projected/5a3b21bf-88b4-4df6-9449-a2833685222a-kube-api-access-kxv5z\") pod \"route-controller-manager-6fb865fc65-wxxbg\" (UID: \"5a3b21bf-88b4-4df6-9449-a2833685222a\") " pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.834730 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:53:59 crc kubenswrapper[4835]: I0202 16:53:59.845851 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.242648 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg"] Feb 02 16:54:00 crc kubenswrapper[4835]: W0202 16:54:00.247625 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a3b21bf_88b4_4df6_9449_a2833685222a.slice/crio-dcdb3a0aae1be3294edeaad982b6ab549b3ec46fd769e0a0e2b7baba568dc596 WatchSource:0}: Error finding container dcdb3a0aae1be3294edeaad982b6ab549b3ec46fd769e0a0e2b7baba568dc596: Status 404 returned error can't find the container with id dcdb3a0aae1be3294edeaad982b6ab549b3ec46fd769e0a0e2b7baba568dc596 Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.271876 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-57c6bd8655-s8csf"] Feb 02 16:54:00 crc kubenswrapper[4835]: W0202 16:54:00.275867 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa61f9d5_ef2d_4ad7_a022_17d453b03d1c.slice/crio-41afc8d79485a3d250cde6315eb5737bbc395fa002afcfce6913307bc11d47d5 WatchSource:0}: Error finding container 41afc8d79485a3d250cde6315eb5737bbc395fa002afcfce6913307bc11d47d5: Status 404 returned error can't find the container with id 41afc8d79485a3d250cde6315eb5737bbc395fa002afcfce6913307bc11d47d5 Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.556234 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" event={"ID":"5a3b21bf-88b4-4df6-9449-a2833685222a","Type":"ContainerStarted","Data":"3eb1f3c7b68361c023943fddd954e1dbdbf8c5e3f74a79c5905e67785a23fc30"} Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.556295 4835 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" event={"ID":"5a3b21bf-88b4-4df6-9449-a2833685222a","Type":"ContainerStarted","Data":"dcdb3a0aae1be3294edeaad982b6ab549b3ec46fd769e0a0e2b7baba568dc596"} Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.557433 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.560891 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" event={"ID":"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c","Type":"ContainerStarted","Data":"12210b42c2ccbd57cc59301298846a820791fc716f8c8f5da99a6dc035652a75"} Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.560933 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" event={"ID":"aa61f9d5-ef2d-4ad7-a022-17d453b03d1c","Type":"ContainerStarted","Data":"41afc8d79485a3d250cde6315eb5737bbc395fa002afcfce6913307bc11d47d5"} Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.561936 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.584999 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.613943 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" podStartSLOduration=2.6139197039999997 podStartE2EDuration="2.613919704s" podCreationTimestamp="2026-02-02 16:53:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:54:00.609427043 +0000 UTC m=+232.231031153" watchObservedRunningTime="2026-02-02 16:54:00.613919704 +0000 UTC m=+232.235523794" Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.652631 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-57c6bd8655-s8csf" podStartSLOduration=2.652611389 podStartE2EDuration="2.652611389s" podCreationTimestamp="2026-02-02 16:53:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:54:00.650015114 +0000 UTC m=+232.271619214" watchObservedRunningTime="2026-02-02 16:54:00.652611389 +0000 UTC m=+232.274215469" Feb 02 16:54:00 crc kubenswrapper[4835]: I0202 16:54:00.764924 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6fb865fc65-wxxbg" Feb 02 16:54:01 crc kubenswrapper[4835]: I0202 16:54:01.199746 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d25a1ea-db07-4b75-8a8c-f559780e8ee1" path="/var/lib/kubelet/pods/7d25a1ea-db07-4b75-8a8c-f559780e8ee1/volumes" Feb 02 16:54:01 crc kubenswrapper[4835]: I0202 16:54:01.200799 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c626adc-1337-4bf9-8b14-b8f63d070ffb" path="/var/lib/kubelet/pods/8c626adc-1337-4bf9-8b14-b8f63d070ffb/volumes" Feb 02 16:54:03 crc 
kubenswrapper[4835]: I0202 16:54:03.172668 4835 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.173805 4835 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.173947 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.174097 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad" gracePeriod=15 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.174252 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153" gracePeriod=15 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.174209 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9" gracePeriod=15 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.174354 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496" gracePeriod=15 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.174470 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d" gracePeriod=15 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.176596 4835 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 16:54:03 crc kubenswrapper[4835]: E0202 16:54:03.177005 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177023 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 16:54:03 crc kubenswrapper[4835]: E0202 16:54:03.177035 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177041 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 16:54:03 crc kubenswrapper[4835]: E0202 16:54:03.177049 4835 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177055 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 16:54:03 crc kubenswrapper[4835]: E0202 16:54:03.177062 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177069 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 16:54:03 crc kubenswrapper[4835]: E0202 16:54:03.177086 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177093 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 16:54:03 crc kubenswrapper[4835]: E0202 16:54:03.177101 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177107 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 16:54:03 crc kubenswrapper[4835]: E0202 16:54:03.177116 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177123 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177322 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177340 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177352 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177360 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177372 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.177379 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.243672 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.243740 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.243782 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.243829 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.243867 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.243960 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.243976 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.243991 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.344928 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.344992 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345018 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345072 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345095 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345101 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345128 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345117 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345166 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345163 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345193 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345226 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345432 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345455 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345554 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.345620 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.584456 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.587207 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.588468 4835 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9" exitCode=0 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.588505 4835 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d" exitCode=0 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.588518 4835 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496" exitCode=0 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.588530 4835 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153" exitCode=2 Feb 02 16:54:03 crc 
kubenswrapper[4835]: I0202 16:54:03.588536 4835 scope.go:117] "RemoveContainer" containerID="7542e968b6b5b5b1200f5c792b79fcbe74c14a89b299413a631db632542c2c4d" Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.591191 4835 generic.go:334] "Generic (PLEG): container finished" podID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" containerID="4c498dc3aeea36877f8e70e935f1657b1071768a0d7323f1bb60fb9266cf0569" exitCode=0 Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.591236 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f1a659cd-7abb-467d-afc2-fbdea0b38c04","Type":"ContainerDied","Data":"4c498dc3aeea36877f8e70e935f1657b1071768a0d7323f1bb60fb9266cf0569"} Feb 02 16:54:03 crc kubenswrapper[4835]: I0202 16:54:03.592540 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:04 crc kubenswrapper[4835]: I0202 16:54:04.604790 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.055113 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.055552 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.170784 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kubelet-dir\") pod \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.170876 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kube-api-access\") pod \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.170916 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-var-lock\") pod \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\" (UID: \"f1a659cd-7abb-467d-afc2-fbdea0b38c04\") " Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.171078 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f1a659cd-7abb-467d-afc2-fbdea0b38c04" (UID: "f1a659cd-7abb-467d-afc2-fbdea0b38c04"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.171156 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-var-lock" (OuterVolumeSpecName: "var-lock") pod "f1a659cd-7abb-467d-afc2-fbdea0b38c04" (UID: "f1a659cd-7abb-467d-afc2-fbdea0b38c04"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.182514 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f1a659cd-7abb-467d-afc2-fbdea0b38c04" (UID: "f1a659cd-7abb-467d-afc2-fbdea0b38c04"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.272045 4835 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.272547 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1a659cd-7abb-467d-afc2-fbdea0b38c04-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.272597 4835 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f1a659cd-7abb-467d-afc2-fbdea0b38c04-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.602168 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.605200 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.606360 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.606945 4835 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.617235 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.617903 4835 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad" exitCode=0 Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.617968 4835 scope.go:117] "RemoveContainer" containerID="8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.618364 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.619613 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f1a659cd-7abb-467d-afc2-fbdea0b38c04","Type":"ContainerDied","Data":"d71c3433bce5e1fcae4f1860b0aeba359668f7f79d2c1469f38d8d67373821d5"} Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.619640 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d71c3433bce5e1fcae4f1860b0aeba359668f7f79d2c1469f38d8d67373821d5" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.619672 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.622890 4835 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.623046 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.633047 4835 scope.go:117] "RemoveContainer" containerID="f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.649046 4835 scope.go:117] "RemoveContainer" containerID="557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.675541 4835 scope.go:117] "RemoveContainer" containerID="c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.679172 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.679421 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.679544 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.680423 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.680548 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.680640 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). 
InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.695452 4835 scope.go:117] "RemoveContainer" containerID="221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.711982 4835 scope.go:117] "RemoveContainer" containerID="e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.734423 4835 scope.go:117] "RemoveContainer" containerID="8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9" Feb 02 16:54:05 crc kubenswrapper[4835]: E0202 16:54:05.734870 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\": container with ID starting with 8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9 not found: ID does not exist" containerID="8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.734928 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9"} err="failed to get container status \"8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\": rpc error: code = NotFound desc = could not find container \"8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9\": container with ID starting with 8863c13061cd285e8c8fdbe9e7f3481fdf5afc8d8c2beec1040a8b2831c8cfa9 not found: ID does not exist" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.734964 4835 scope.go:117] "RemoveContainer" containerID="f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d" Feb 02 16:54:05 crc kubenswrapper[4835]: E0202 16:54:05.735587 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\": container with ID starting with f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d not found: ID does not exist" containerID="f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.735618 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d"} err="failed to get container status \"f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\": rpc error: code = NotFound desc = could not find container \"f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d\": container with ID starting with f2c5878ba4d07a28c64ea4f642aebaa05d2ed2dc3afb4011c387a3b1bc696d4d not found: ID does not exist" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.735657 4835 scope.go:117] "RemoveContainer" containerID="557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496" Feb 02 16:54:05 crc kubenswrapper[4835]: E0202 16:54:05.735944 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\": container with ID starting with 557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496 not found: ID does not exist" 
containerID="557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.735982 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496"} err="failed to get container status \"557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\": rpc error: code = NotFound desc = could not find container \"557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496\": container with ID starting with 557e9e05b8f9249fe3c39b3df03b7ed81b4c6d6b3afc48497e2f4d87313de496 not found: ID does not exist" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.736003 4835 scope.go:117] "RemoveContainer" containerID="c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153" Feb 02 16:54:05 crc kubenswrapper[4835]: E0202 16:54:05.736356 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\": container with ID starting with c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153 not found: ID does not exist" containerID="c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.736385 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153"} err="failed to get container status \"c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\": rpc error: code = NotFound desc = could not find container \"c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153\": container with ID starting with c030df724c9b034186427e7dce786ddced8f890790bb1a7fa03ac78698afc153 not found: ID does not exist" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.736422 4835 scope.go:117] "RemoveContainer" containerID="221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad" Feb 02 16:54:05 crc kubenswrapper[4835]: E0202 16:54:05.736711 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\": container with ID starting with 221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad not found: ID does not exist" containerID="221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.736751 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad"} err="failed to get container status \"221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\": rpc error: code = NotFound desc = could not find container \"221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad\": container with ID starting with 221ecb9082dfb5ba6a285383d482616ffa9462231ec4ff84a01b1269f53b43ad not found: ID does not exist" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.736765 4835 scope.go:117] "RemoveContainer" containerID="e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac" Feb 02 16:54:05 crc kubenswrapper[4835]: E0202 16:54:05.737166 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\": container with ID starting with e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac not found: ID does not exist" containerID="e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.737184 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac"} err="failed to get container status \"e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\": rpc error: code = NotFound desc = could not find container \"e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac\": container with ID starting with e6a31ae543d9d31de5b746263b6ab158be338e0d7c5ae5b973c4326f4fede0ac not found: ID does not exist" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.781605 4835 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.781663 4835 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.781690 4835 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.936666 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:05 crc kubenswrapper[4835]: I0202 16:54:05.937126 4835 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:07 crc kubenswrapper[4835]: I0202 16:54:07.198866 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Feb 02 16:54:08 crc kubenswrapper[4835]: E0202 16:54:08.228066 4835 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.245:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:08 crc kubenswrapper[4835]: I0202 16:54:08.228676 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:08 crc kubenswrapper[4835]: W0202 16:54:08.259177 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-674740df95ff19bd5afeb5bee87de0164d1e861c8485d6c78c15f0aa6abbb8f5 WatchSource:0}: Error finding container 674740df95ff19bd5afeb5bee87de0164d1e861c8485d6c78c15f0aa6abbb8f5: Status 404 returned error can't find the container with id 674740df95ff19bd5afeb5bee87de0164d1e861c8485d6c78c15f0aa6abbb8f5 Feb 02 16:54:08 crc kubenswrapper[4835]: E0202 16:54:08.264411 4835 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.245:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.18907c3b6efeb843 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 16:54:08.263706691 +0000 UTC m=+239.885310811,LastTimestamp:2026-02-02 16:54:08.263706691 +0000 UTC m=+239.885310811,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 16:54:08 crc kubenswrapper[4835]: I0202 16:54:08.641248 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0e9701e7cca6cc4d7b600f64bf551fb91b761f68d073722f1f539c1eb35bdbe1"} Feb 02 16:54:08 crc kubenswrapper[4835]: I0202 16:54:08.641716 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"674740df95ff19bd5afeb5bee87de0164d1e861c8485d6c78c15f0aa6abbb8f5"} Feb 02 16:54:08 crc kubenswrapper[4835]: E0202 16:54:08.642437 4835 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.245:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:08 crc kubenswrapper[4835]: I0202 16:54:08.642592 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: I0202 16:54:09.205697 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": 
dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.354828 4835 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.355346 4835 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.355780 4835 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.356220 4835 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.356726 4835 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: I0202 16:54:09.356776 4835 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.358465 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="200ms" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.559233 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="400ms" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.786622 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:54:09Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:54:09Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:54:09Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T16:54:09Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.787097 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.787398 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.787670 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.787989 4835 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.788014 4835 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 16:54:09 crc kubenswrapper[4835]: E0202 16:54:09.960348 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="800ms" Feb 02 16:54:10 crc kubenswrapper[4835]: E0202 16:54:10.761398 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="1.6s" Feb 02 16:54:12 crc kubenswrapper[4835]: E0202 16:54:12.362946 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="3.2s" Feb 02 16:54:14 crc kubenswrapper[4835]: I0202 16:54:14.590789 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" containerName="oauth-openshift" containerID="cri-o://6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6" gracePeriod=15 Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.188647 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.189573 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.229781 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.230398 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.230687 4835 status_manager.go:851] "Failed to get status for pod" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-2n9fx\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.240736 4835 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.240777 4835 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:15 crc kubenswrapper[4835]: E0202 16:54:15.241123 4835 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.241756 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:15 crc kubenswrapper[4835]: W0202 16:54:15.270309 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-196dcac17d7b6449470d8a0ea77e6ceb4570ac9a3916b69187a24c101407fbc8 WatchSource:0}: Error finding container 196dcac17d7b6449470d8a0ea77e6ceb4570ac9a3916b69187a24c101407fbc8: Status 404 returned error can't find the container with id 196dcac17d7b6449470d8a0ea77e6ceb4570ac9a3916b69187a24c101407fbc8 Feb 02 16:54:15 crc kubenswrapper[4835]: E0202 16:54:15.274968 4835 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.245:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" volumeName="registry-storage" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.306872 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-dir\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.306968 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-error\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307013 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-provider-selection\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307060 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307077 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-login\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307139 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-idp-0-file-data\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307190 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-cliconfig\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307228 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-service-ca\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307267 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tvbq\" (UniqueName: \"kubernetes.io/projected/ea193cb5-8e86-4628-a115-16a3987f4eaf-kube-api-access-9tvbq\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307355 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-session\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307414 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-router-certs\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307458 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-trusted-ca-bundle\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307501 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-ocp-branding-template\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc 
kubenswrapper[4835]: I0202 16:54:15.307540 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-policies\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.307737 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-serving-cert\") pod \"ea193cb5-8e86-4628-a115-16a3987f4eaf\" (UID: \"ea193cb5-8e86-4628-a115-16a3987f4eaf\") " Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.308515 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.308813 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.308876 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.309502 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.309544 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.309556 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.309576 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.309603 4835 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.313701 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.314515 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.314608 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea193cb5-8e86-4628-a115-16a3987f4eaf-kube-api-access-9tvbq" (OuterVolumeSpecName: "kube-api-access-9tvbq") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "kube-api-access-9tvbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.315550 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.315916 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.316022 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.316344 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.316563 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.316979 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "ea193cb5-8e86-4628-a115-16a3987f4eaf" (UID: "ea193cb5-8e86-4628-a115-16a3987f4eaf"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.411894 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.411957 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tvbq\" (UniqueName: \"kubernetes.io/projected/ea193cb5-8e86-4628-a115-16a3987f4eaf-kube-api-access-9tvbq\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.411978 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.411997 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.412016 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.412038 4835 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ea193cb5-8e86-4628-a115-16a3987f4eaf-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.412057 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.412080 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.412106 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.412132 4835 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ea193cb5-8e86-4628-a115-16a3987f4eaf-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:15 crc kubenswrapper[4835]: E0202 16:54:15.565428 4835 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.245:6443: connect: connection refused" interval="6.4s" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.693617 4835 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="f1e0696080151915ee1e89fe7d3eb42f92d5eef773c2c5fbfa51859ae845a0e8" exitCode=0 Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.693743 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"f1e0696080151915ee1e89fe7d3eb42f92d5eef773c2c5fbfa51859ae845a0e8"} Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.693857 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"196dcac17d7b6449470d8a0ea77e6ceb4570ac9a3916b69187a24c101407fbc8"} Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.694432 4835 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.694474 4835 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.694842 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.695356 4835 status_manager.go:851] "Failed to get status for pod" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-2n9fx\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: E0202 16:54:15.695365 4835 
mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.697733 4835 generic.go:334] "Generic (PLEG): container finished" podID="ea193cb5-8e86-4628-a115-16a3987f4eaf" containerID="6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6" exitCode=0 Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.697784 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" event={"ID":"ea193cb5-8e86-4628-a115-16a3987f4eaf","Type":"ContainerDied","Data":"6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6"} Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.697862 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" event={"ID":"ea193cb5-8e86-4628-a115-16a3987f4eaf","Type":"ContainerDied","Data":"879d2e0ee4fdf155d1bf2c61155063211432fee08da8c7c75fc0c6fac4df9983"} Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.697897 4835 scope.go:117] "RemoveContainer" containerID="6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.697981 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.698962 4835 status_manager.go:851] "Failed to get status for pod" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-2n9fx\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.699489 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.764540 4835 status_manager.go:851] "Failed to get status for pod" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.765014 4835 status_manager.go:851] "Failed to get status for pod" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" pod="openshift-authentication/oauth-openshift-558db77b4-2n9fx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-2n9fx\": dial tcp 38.102.83.245:6443: connect: connection refused" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.776884 4835 scope.go:117] "RemoveContainer" containerID="6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6" Feb 02 16:54:15 crc kubenswrapper[4835]: E0202 16:54:15.777543 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6\": container with ID starting with 6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6 not found: ID does not exist" containerID="6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.777581 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6"} err="failed to get container status \"6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6\": rpc error: code = NotFound desc = could not find container \"6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6\": container with ID starting with 6e43a5ef09353dfa2324cfc24220d4e8f1e5bd70956abad67000e6b5b27307b6 not found: ID does not exist" Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.845659 4835 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Liveness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": read tcp 192.168.126.11:53528->192.168.126.11:10257: read: connection reset by peer" start-of-body= Feb 02 16:54:15 crc kubenswrapper[4835]: I0202 16:54:15.845747 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": read tcp 192.168.126.11:53528->192.168.126.11:10257: read: connection reset by peer" Feb 02 16:54:16 crc kubenswrapper[4835]: I0202 16:54:16.713196 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b8a735c47d4f2d8254e1f9dc16f2434f32e5d9fc60237b1f5535545cd2c96085"} Feb 02 16:54:16 crc kubenswrapper[4835]: I0202 16:54:16.713552 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7b84a7d890c66d60654fca00f781a81575df0bbeaf7ee7e1566f842aa5938a45"} Feb 02 16:54:16 crc kubenswrapper[4835]: I0202 16:54:16.713577 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0f3f4354b94c5fd03252794da6bc0ca79523d609928b2cfd6a3c02427751ee74"} Feb 02 16:54:16 crc kubenswrapper[4835]: I0202 16:54:16.713588 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7297c7d02193d653bc39476d3633179966a032b16c245824e5bbb7b963f1f224"} Feb 02 16:54:16 crc kubenswrapper[4835]: I0202 16:54:16.719814 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 16:54:16 crc kubenswrapper[4835]: I0202 16:54:16.719868 4835 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4" exitCode=1 Feb 02 16:54:16 crc kubenswrapper[4835]: I0202 16:54:16.719903 4835 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4"} Feb 02 16:54:16 crc kubenswrapper[4835]: I0202 16:54:16.720394 4835 scope.go:117] "RemoveContainer" containerID="f5f166a2d92af4d36aab63da031872dd7f697724ab664b863aa7c06e132118a4" Feb 02 16:54:17 crc kubenswrapper[4835]: I0202 16:54:17.506837 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:54:17 crc kubenswrapper[4835]: I0202 16:54:17.732194 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"902052a8d272e735fa17ba013eb9024703204c9f263b79903d470c5433b89e15"} Feb 02 16:54:17 crc kubenswrapper[4835]: I0202 16:54:17.732386 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:17 crc kubenswrapper[4835]: I0202 16:54:17.732431 4835 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:17 crc kubenswrapper[4835]: I0202 16:54:17.732464 4835 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:17 crc kubenswrapper[4835]: I0202 16:54:17.737837 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 16:54:17 crc kubenswrapper[4835]: I0202 16:54:17.737896 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b693cee234052d4d0ec0198f7ccac5768980e515c290b3539c2e8fffe8985533"} Feb 02 16:54:19 crc kubenswrapper[4835]: I0202 16:54:19.872688 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:54:20 crc kubenswrapper[4835]: I0202 16:54:20.242363 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:20 crc kubenswrapper[4835]: I0202 16:54:20.242425 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:20 crc kubenswrapper[4835]: I0202 16:54:20.250804 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:22 crc kubenswrapper[4835]: I0202 16:54:22.745253 4835 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:22 crc kubenswrapper[4835]: I0202 16:54:22.775452 4835 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:22 crc kubenswrapper[4835]: I0202 16:54:22.775490 4835 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:22 crc kubenswrapper[4835]: I0202 16:54:22.781777 4835 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:22 crc kubenswrapper[4835]: I0202 16:54:22.786330 4835 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="846342d0-9336-4211-a691-811b915c822f" Feb 02 16:54:23 crc kubenswrapper[4835]: I0202 16:54:23.780625 4835 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:23 crc kubenswrapper[4835]: I0202 16:54:23.780918 4835 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:27 crc kubenswrapper[4835]: I0202 16:54:27.507355 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:54:27 crc kubenswrapper[4835]: I0202 16:54:27.513371 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:54:27 crc kubenswrapper[4835]: I0202 16:54:27.814021 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 16:54:29 crc kubenswrapper[4835]: I0202 16:54:29.215158 4835 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="846342d0-9336-4211-a691-811b915c822f" Feb 02 16:54:32 crc kubenswrapper[4835]: I0202 16:54:32.302144 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 16:54:32 crc kubenswrapper[4835]: I0202 16:54:32.610681 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 16:54:32 crc kubenswrapper[4835]: I0202 16:54:32.695631 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 16:54:32 crc kubenswrapper[4835]: I0202 16:54:32.974445 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 16:54:33 crc kubenswrapper[4835]: I0202 16:54:33.645437 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 16:54:33 crc kubenswrapper[4835]: I0202 16:54:33.887084 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 16:54:33 crc kubenswrapper[4835]: I0202 16:54:33.895961 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 16:54:33 crc kubenswrapper[4835]: I0202 16:54:33.975121 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 16:54:34 crc kubenswrapper[4835]: I0202 16:54:34.413415 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 16:54:34 crc kubenswrapper[4835]: I0202 16:54:34.787376 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 16:54:34 crc 
kubenswrapper[4835]: I0202 16:54:34.892740 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 16:54:35 crc kubenswrapper[4835]: I0202 16:54:35.109643 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 16:54:35 crc kubenswrapper[4835]: I0202 16:54:35.135117 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 02 16:54:35 crc kubenswrapper[4835]: I0202 16:54:35.172078 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 16:54:35 crc kubenswrapper[4835]: I0202 16:54:35.295187 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 16:54:35 crc kubenswrapper[4835]: I0202 16:54:35.433200 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 16:54:35 crc kubenswrapper[4835]: I0202 16:54:35.656242 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 16:54:35 crc kubenswrapper[4835]: I0202 16:54:35.706351 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 02 16:54:35 crc kubenswrapper[4835]: I0202 16:54:35.722851 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.010586 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.040574 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.060789 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.263579 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.268634 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.497943 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.609623 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.711612 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.712508 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.760857 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.822464 4835 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.855297 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.919784 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.965886 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 16:54:36 crc kubenswrapper[4835]: I0202 16:54:36.967914 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 16:54:37 crc kubenswrapper[4835]: I0202 16:54:37.345419 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 16:54:37 crc kubenswrapper[4835]: I0202 16:54:37.456379 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 16:54:37 crc kubenswrapper[4835]: I0202 16:54:37.538114 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 16:54:37 crc kubenswrapper[4835]: I0202 16:54:37.617479 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 16:54:37 crc kubenswrapper[4835]: I0202 16:54:37.620060 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 16:54:37 crc kubenswrapper[4835]: I0202 16:54:37.622078 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 16:54:37 crc kubenswrapper[4835]: I0202 16:54:37.880592 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.039872 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.123784 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.124516 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.233440 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.299719 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.312062 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.377714 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 
16:54:38.437961 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.505124 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.515830 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.533857 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.672644 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.726120 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.909748 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 16:54:38 crc kubenswrapper[4835]: I0202 16:54:38.965172 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.028527 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.073423 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.122423 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.247867 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.278201 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.291539 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.294007 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.302370 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.380072 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.408618 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.474049 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.492323 4835 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.571386 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.575506 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.702804 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.733101 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.943859 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 16:54:39 crc kubenswrapper[4835]: I0202 16:54:39.992724 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.073865 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.099953 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.101380 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.120951 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.187947 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.190500 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.193550 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.259637 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.379447 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.395936 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.560321 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.573829 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 
16:54:40.608366 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.653574 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.698520 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.753287 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.850518 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.887455 4835 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.912249 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 16:54:40 crc kubenswrapper[4835]: I0202 16:54:40.982749 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.013644 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.020538 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.042122 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.156340 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.196676 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.256733 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.276409 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.307977 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.383749 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.399698 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.400764 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.461437 4835 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.474552 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.493855 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.502754 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.700793 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.710554 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.717297 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.767125 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.769546 4835 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.823484 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 16:54:41 crc kubenswrapper[4835]: I0202 16:54:41.876251 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.091025 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.133411 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.173102 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.222138 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.272165 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.320997 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.327131 4835 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.334790 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-2n9fx"] Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 
16:54:42.334874 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7b874c479d-vr2k7","openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 16:54:42 crc kubenswrapper[4835]: E0202 16:54:42.335296 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" containerName="oauth-openshift" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.335315 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" containerName="oauth-openshift" Feb 02 16:54:42 crc kubenswrapper[4835]: E0202 16:54:42.335344 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" containerName="installer" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.335361 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" containerName="installer" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.335595 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1a659cd-7abb-467d-afc2-fbdea0b38c04" containerName="installer" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.335626 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" containerName="oauth-openshift" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.336362 4835 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.336441 4835 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="91c77461-4f7a-42a8-ad4a-48d0aeb9e4e1" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.336874 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.339343 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.339709 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.339742 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.339760 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.339831 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.339921 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.340094 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.340383 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.340603 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.345540 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.345646 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.346674 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.348637 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.353440 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.358799 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.365623 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.368646 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=20.368625773 podStartE2EDuration="20.368625773s" podCreationTimestamp="2026-02-02 16:54:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-02 16:54:42.365486212 +0000 UTC m=+273.987090312" watchObservedRunningTime="2026-02-02 16:54:42.368625773 +0000 UTC m=+273.990229853" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462027 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-login\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462107 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-session\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462166 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462191 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462220 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462251 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462378 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-audit-dir\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462407 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462446 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462477 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-audit-policies\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462496 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9gbz\" (UniqueName: \"kubernetes.io/projected/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-kube-api-access-x9gbz\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462521 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462551 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-error\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.462573 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.490537 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.516897 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563781 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563828 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563861 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-audit-dir\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563883 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563915 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563937 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-audit-policies\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563953 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9gbz\" (UniqueName: \"kubernetes.io/projected/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-kube-api-access-x9gbz\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563971 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.563990 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-error\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.564006 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.564031 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-login\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.564051 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-session\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.564067 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.564088 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.564795 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.565185 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-audit-dir\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.565416 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-audit-policies\") pod 
\"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.565794 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.566056 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.570366 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.571323 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.572643 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-login\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.572811 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.573382 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.573921 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-system-session\") pod 
\"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.576516 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.583431 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-v4-0-config-user-template-error\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.585237 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9gbz\" (UniqueName: \"kubernetes.io/projected/9e1b90bd-a143-4bd7-8de0-d08e72b816bf-kube-api-access-x9gbz\") pod \"oauth-openshift-7b874c479d-vr2k7\" (UID: \"9e1b90bd-a143-4bd7-8de0-d08e72b816bf\") " pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.597202 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.665875 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.666899 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.687862 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.701132 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.743435 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.810805 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.887925 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.917205 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.960898 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 16:54:42 crc kubenswrapper[4835]: I0202 16:54:42.967537 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7b874c479d-vr2k7"] Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.020157 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.195358 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea193cb5-8e86-4628-a115-16a3987f4eaf" path="/var/lib/kubelet/pods/ea193cb5-8e86-4628-a115-16a3987f4eaf/volumes" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.414756 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.460669 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.522910 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.579824 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.594303 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.610149 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.747309 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.803204 4835 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.804747 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.805225 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.806376 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.911299 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" event={"ID":"9e1b90bd-a143-4bd7-8de0-d08e72b816bf","Type":"ContainerStarted","Data":"704707aec723eb15c5c23a13433145e41aabc5b77bdf07e354985d431fc1e005"} Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.911333 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" event={"ID":"9e1b90bd-a143-4bd7-8de0-d08e72b816bf","Type":"ContainerStarted","Data":"bf44eec80c3b5c04ecfa22ab5dca7ff9cb426f78e167b9de189d21fb8c8ffdb8"} Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.911372 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.928338 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" podStartSLOduration=54.928322152 podStartE2EDuration="54.928322152s" podCreationTimestamp="2026-02-02 16:53:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:54:43.926552021 +0000 UTC m=+275.548156121" watchObservedRunningTime="2026-02-02 16:54:43.928322152 +0000 UTC m=+275.549926232" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.960419 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7b874c479d-vr2k7" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.994157 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.994579 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 16:54:43 crc kubenswrapper[4835]: I0202 16:54:43.996259 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.056947 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.063754 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.130409 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 16:54:44 crc kubenswrapper[4835]: 
I0202 16:54:44.294111 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.366613 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.425150 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.438984 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.462967 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.491994 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.553013 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.620410 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.642332 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.747889 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.752184 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.753184 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.838067 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.858927 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.890421 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.894923 4835 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.895185 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://0e9701e7cca6cc4d7b600f64bf551fb91b761f68d073722f1f539c1eb35bdbe1" gracePeriod=5 Feb 02 16:54:44 crc kubenswrapper[4835]: I0202 16:54:44.962052 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" 
Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.075156 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.164898 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.186285 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.222306 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.231083 4835 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.314991 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.324856 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.430470 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.434541 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.504568 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.571414 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.609527 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.733710 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.748240 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.783908 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.799238 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.840643 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.845686 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 16:54:45 crc kubenswrapper[4835]: I0202 16:54:45.997738 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.019204 4835 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.195705 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.227558 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.268955 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.285233 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.327736 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.370445 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.411404 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.595433 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.627771 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.692312 4835 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.825661 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.832016 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 16:54:46 crc kubenswrapper[4835]: I0202 16:54:46.882499 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.145371 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.173563 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.189159 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.199230 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.297378 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 
16:54:47.310174 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.426508 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.543700 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.581071 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.594069 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.599588 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 16:54:47 crc kubenswrapper[4835]: I0202 16:54:47.755866 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.044670 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.084670 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.240769 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.263111 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.307945 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.470944 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.492867 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.509945 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.925581 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 02 16:54:48 crc kubenswrapper[4835]: I0202 16:54:48.972629 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.140008 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.168476 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 16:54:49 crc 
kubenswrapper[4835]: I0202 16:54:49.207468 4835 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.261241 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.285446 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.427660 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.659024 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.730129 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.924759 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.957168 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 16:54:49 crc kubenswrapper[4835]: I0202 16:54:49.958087 4835 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="0e9701e7cca6cc4d7b600f64bf551fb91b761f68d073722f1f539c1eb35bdbe1" exitCode=137 Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.094796 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.506618 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.506705 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571214 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571294 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571328 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571368 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571371 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571440 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571471 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571476 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571778 4835 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571794 4835 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571806 4835 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.571866 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.582417 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.597525 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.639654 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.649498 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.673855 4835 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.673922 4835 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.796623 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.967818 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.967914 4835 scope.go:117] "RemoveContainer" containerID="0e9701e7cca6cc4d7b600f64bf551fb91b761f68d073722f1f539c1eb35bdbe1" Feb 02 16:54:50 crc kubenswrapper[4835]: I0202 16:54:50.968075 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 16:54:51 crc kubenswrapper[4835]: I0202 16:54:51.200095 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 02 16:55:08 crc kubenswrapper[4835]: I0202 16:55:08.074847 4835 generic.go:334] "Generic (PLEG): container finished" podID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerID="31db99e716a54ac578b7df24b69eb8645ce3179e1687f718e49010a20ca329e6" exitCode=0 Feb 02 16:55:08 crc kubenswrapper[4835]: I0202 16:55:08.074937 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" event={"ID":"1bfd176d-b2cc-45f8-a80d-61e391f25163","Type":"ContainerDied","Data":"31db99e716a54ac578b7df24b69eb8645ce3179e1687f718e49010a20ca329e6"} Feb 02 16:55:08 crc kubenswrapper[4835]: I0202 16:55:08.076212 4835 scope.go:117] "RemoveContainer" containerID="31db99e716a54ac578b7df24b69eb8645ce3179e1687f718e49010a20ca329e6" Feb 02 16:55:08 crc kubenswrapper[4835]: I0202 16:55:08.954601 4835 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Feb 02 16:55:09 crc kubenswrapper[4835]: I0202 16:55:09.082348 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" event={"ID":"1bfd176d-b2cc-45f8-a80d-61e391f25163","Type":"ContainerStarted","Data":"4ca5fb927166e1cc92551dfc5fa03c748cb4caa5cab1858effa03c083b110d4f"} Feb 02 16:55:09 crc kubenswrapper[4835]: I0202 16:55:09.082937 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:55:09 crc kubenswrapper[4835]: I0202 16:55:09.088058 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.861341 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-m5xz6"] Feb 02 16:55:29 crc kubenswrapper[4835]: E0202 16:55:29.862172 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.862187 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.862335 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.862836 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.894919 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-m5xz6"] Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.952151 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9cc206e0-b17c-4ad2-b9a4-8c915218a730-installation-pull-secrets\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.952216 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9cc206e0-b17c-4ad2-b9a4-8c915218a730-ca-trust-extracted\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.952311 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9cc206e0-b17c-4ad2-b9a4-8c915218a730-trusted-ca\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.952341 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9cc206e0-b17c-4ad2-b9a4-8c915218a730-registry-certificates\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.952391 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgfvz\" (UniqueName: \"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-kube-api-access-cgfvz\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.952421 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.952454 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-registry-tls\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.952477 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-bound-sa-token\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:29 crc kubenswrapper[4835]: I0202 16:55:29.972269 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.053490 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgfvz\" (UniqueName: \"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-kube-api-access-cgfvz\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.053553 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-registry-tls\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.053581 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-bound-sa-token\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.053618 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9cc206e0-b17c-4ad2-b9a4-8c915218a730-installation-pull-secrets\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.053654 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9cc206e0-b17c-4ad2-b9a4-8c915218a730-ca-trust-extracted\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.053697 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9cc206e0-b17c-4ad2-b9a4-8c915218a730-trusted-ca\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.053726 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9cc206e0-b17c-4ad2-b9a4-8c915218a730-registry-certificates\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.055098 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9cc206e0-b17c-4ad2-b9a4-8c915218a730-ca-trust-extracted\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.055325 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9cc206e0-b17c-4ad2-b9a4-8c915218a730-registry-certificates\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.057152 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9cc206e0-b17c-4ad2-b9a4-8c915218a730-trusted-ca\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.060457 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9cc206e0-b17c-4ad2-b9a4-8c915218a730-installation-pull-secrets\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.062055 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-registry-tls\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.075767 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgfvz\" (UniqueName: \"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-kube-api-access-cgfvz\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.076570 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9cc206e0-b17c-4ad2-b9a4-8c915218a730-bound-sa-token\") pod \"image-registry-66df7c8f76-m5xz6\" (UID: \"9cc206e0-b17c-4ad2-b9a4-8c915218a730\") " pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.208303 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:30 crc kubenswrapper[4835]: I0202 16:55:30.635468 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-m5xz6"] Feb 02 16:55:31 crc kubenswrapper[4835]: I0202 16:55:31.197519 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" event={"ID":"9cc206e0-b17c-4ad2-b9a4-8c915218a730","Type":"ContainerStarted","Data":"aa7cd3e8020c8c897c36fa70190def2d0d289f54a14e6d3b88ce02fcf4c4448f"} Feb 02 16:55:31 crc kubenswrapper[4835]: I0202 16:55:31.197583 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" event={"ID":"9cc206e0-b17c-4ad2-b9a4-8c915218a730","Type":"ContainerStarted","Data":"3dd59a3c4a4e2b7246fc7c1a9fb05be8c5251a194dbab7d147538f96f4b1d721"} Feb 02 16:55:31 crc kubenswrapper[4835]: I0202 16:55:31.197609 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:31 crc kubenswrapper[4835]: I0202 16:55:31.225489 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" podStartSLOduration=2.225470739 podStartE2EDuration="2.225470739s" podCreationTimestamp="2026-02-02 16:55:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 16:55:31.224365807 +0000 UTC m=+322.845969897" watchObservedRunningTime="2026-02-02 16:55:31.225470739 +0000 UTC m=+322.847074829" Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.845559 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5cbxv"] Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.854891 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jq6s4"] Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.855169 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jq6s4" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerName="registry-server" containerID="cri-o://30811e6f9bcb61a9d17661fefb50cb5dd8bb4fcb3648f82c284c2712ab21d3f4" gracePeriod=30 Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.855463 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5cbxv" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" containerName="registry-server" containerID="cri-o://01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859" gracePeriod=30 Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.869154 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5vz8"] Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.869656 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" containerID="cri-o://4ca5fb927166e1cc92551dfc5fa03c748cb4caa5cab1858effa03c083b110d4f" gracePeriod=30 Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.874692 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w9l6m"] Feb 
02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.875018 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w9l6m" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="registry-server" containerID="cri-o://9d7290b15650e0224e45602dc53526d231b8653eb2eaad04effb640d21eecf5d" gracePeriod=30 Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.891188 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-56ncc"] Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.891644 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-56ncc" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="registry-server" containerID="cri-o://89c7081530301f1bc3271517c27bf9405ca5efcc7560c52443e1c05181c0f153" gracePeriod=30 Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.907226 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lmjmd"] Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.908423 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.910791 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lmjmd"] Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.928305 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/redhat-marketplace-w9l6m" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="registry-server" probeResult="failure" output="" Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.929856 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-w9l6m" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="registry-server" probeResult="failure" output="" Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.934319 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/624437d3-bcc0-40bc-bc25-d8876722dbc8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.935023 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htt7g\" (UniqueName: \"kubernetes.io/projected/624437d3-bcc0-40bc-bc25-d8876722dbc8-kube-api-access-htt7g\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:49 crc kubenswrapper[4835]: I0202 16:55:49.935108 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/624437d3-bcc0-40bc-bc25-d8876722dbc8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.037070 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/624437d3-bcc0-40bc-bc25-d8876722dbc8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.037183 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htt7g\" (UniqueName: \"kubernetes.io/projected/624437d3-bcc0-40bc-bc25-d8876722dbc8-kube-api-access-htt7g\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.037231 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/624437d3-bcc0-40bc-bc25-d8876722dbc8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.039885 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/624437d3-bcc0-40bc-bc25-d8876722dbc8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.046775 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/624437d3-bcc0-40bc-bc25-d8876722dbc8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.057148 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htt7g\" (UniqueName: \"kubernetes.io/projected/624437d3-bcc0-40bc-bc25-d8876722dbc8-kube-api-access-htt7g\") pod \"marketplace-operator-79b997595-lmjmd\" (UID: \"624437d3-bcc0-40bc-bc25-d8876722dbc8\") " pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.216283 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-m5xz6" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.273341 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sfbf9"] Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.301831 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.316498 4835 generic.go:334] "Generic (PLEG): container finished" podID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerID="4ca5fb927166e1cc92551dfc5fa03c748cb4caa5cab1858effa03c083b110d4f" exitCode=0 Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.316559 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" event={"ID":"1bfd176d-b2cc-45f8-a80d-61e391f25163","Type":"ContainerDied","Data":"4ca5fb927166e1cc92551dfc5fa03c748cb4caa5cab1858effa03c083b110d4f"} Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.316588 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" event={"ID":"1bfd176d-b2cc-45f8-a80d-61e391f25163","Type":"ContainerDied","Data":"a11287ec32510918a50401159353db8e067ceb0f364c415e9507d506ad476c42"} Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.316601 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a11287ec32510918a50401159353db8e067ceb0f364c415e9507d506ad476c42" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.316615 4835 scope.go:117] "RemoveContainer" containerID="31db99e716a54ac578b7df24b69eb8645ce3179e1687f718e49010a20ca329e6" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.317595 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.325783 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.329429 4835 generic.go:334] "Generic (PLEG): container finished" podID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerID="89c7081530301f1bc3271517c27bf9405ca5efcc7560c52443e1c05181c0f153" exitCode=0 Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.329509 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-56ncc" event={"ID":"cd498040-6af5-4953-8b1c-ea3803ba1b2a","Type":"ContainerDied","Data":"89c7081530301f1bc3271517c27bf9405ca5efcc7560c52443e1c05181c0f153"} Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.339397 4835 generic.go:334] "Generic (PLEG): container finished" podID="4b92b257-9045-493d-9c64-0e3660e8513a" containerID="9d7290b15650e0224e45602dc53526d231b8653eb2eaad04effb640d21eecf5d" exitCode=0 Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.339483 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w9l6m" event={"ID":"4b92b257-9045-493d-9c64-0e3660e8513a","Type":"ContainerDied","Data":"9d7290b15650e0224e45602dc53526d231b8653eb2eaad04effb640d21eecf5d"} Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.342566 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-trusted-ca\") pod \"1bfd176d-b2cc-45f8-a80d-61e391f25163\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.342618 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx9wv\" (UniqueName: 
\"kubernetes.io/projected/29dff398-e620-4558-854e-3e9fb13f1b25-kube-api-access-xx9wv\") pod \"29dff398-e620-4558-854e-3e9fb13f1b25\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.342645 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-catalog-content\") pod \"29dff398-e620-4558-854e-3e9fb13f1b25\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.342702 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpcpc\" (UniqueName: \"kubernetes.io/projected/1bfd176d-b2cc-45f8-a80d-61e391f25163-kube-api-access-kpcpc\") pod \"1bfd176d-b2cc-45f8-a80d-61e391f25163\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.342739 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-utilities\") pod \"29dff398-e620-4558-854e-3e9fb13f1b25\" (UID: \"29dff398-e620-4558-854e-3e9fb13f1b25\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.342786 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-operator-metrics\") pod \"1bfd176d-b2cc-45f8-a80d-61e391f25163\" (UID: \"1bfd176d-b2cc-45f8-a80d-61e391f25163\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.346019 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-utilities" (OuterVolumeSpecName: "utilities") pod "29dff398-e620-4558-854e-3e9fb13f1b25" (UID: "29dff398-e620-4558-854e-3e9fb13f1b25"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.346366 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "1bfd176d-b2cc-45f8-a80d-61e391f25163" (UID: "1bfd176d-b2cc-45f8-a80d-61e391f25163"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.353838 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "1bfd176d-b2cc-45f8-a80d-61e391f25163" (UID: "1bfd176d-b2cc-45f8-a80d-61e391f25163"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.353971 4835 generic.go:334] "Generic (PLEG): container finished" podID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerID="30811e6f9bcb61a9d17661fefb50cb5dd8bb4fcb3648f82c284c2712ab21d3f4" exitCode=0 Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.354046 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jq6s4" event={"ID":"5eecd945-3eb3-4384-9836-c1a65b49063f","Type":"ContainerDied","Data":"30811e6f9bcb61a9d17661fefb50cb5dd8bb4fcb3648f82c284c2712ab21d3f4"} Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.356093 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29dff398-e620-4558-854e-3e9fb13f1b25-kube-api-access-xx9wv" (OuterVolumeSpecName: "kube-api-access-xx9wv") pod "29dff398-e620-4558-854e-3e9fb13f1b25" (UID: "29dff398-e620-4558-854e-3e9fb13f1b25"). InnerVolumeSpecName "kube-api-access-xx9wv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.368191 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bfd176d-b2cc-45f8-a80d-61e391f25163-kube-api-access-kpcpc" (OuterVolumeSpecName: "kube-api-access-kpcpc") pod "1bfd176d-b2cc-45f8-a80d-61e391f25163" (UID: "1bfd176d-b2cc-45f8-a80d-61e391f25163"). InnerVolumeSpecName "kube-api-access-kpcpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.378079 4835 generic.go:334] "Generic (PLEG): container finished" podID="29dff398-e620-4558-854e-3e9fb13f1b25" containerID="01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859" exitCode=0 Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.378138 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5cbxv" event={"ID":"29dff398-e620-4558-854e-3e9fb13f1b25","Type":"ContainerDied","Data":"01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859"} Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.378171 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5cbxv" event={"ID":"29dff398-e620-4558-854e-3e9fb13f1b25","Type":"ContainerDied","Data":"cd0d6dacbe048e20bc7e14c31dc877ec938a45e88fd03da2efa2185d97e06e3a"} Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.378195 4835 scope.go:117] "RemoveContainer" containerID="01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.378351 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5cbxv" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.411243 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29dff398-e620-4558-854e-3e9fb13f1b25" (UID: "29dff398-e620-4558-854e-3e9fb13f1b25"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.425929 4835 scope.go:117] "RemoveContainer" containerID="a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.444037 4835 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.444440 4835 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1bfd176d-b2cc-45f8-a80d-61e391f25163-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.444450 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx9wv\" (UniqueName: \"kubernetes.io/projected/29dff398-e620-4558-854e-3e9fb13f1b25-kube-api-access-xx9wv\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.444459 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.444468 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpcpc\" (UniqueName: \"kubernetes.io/projected/1bfd176d-b2cc-45f8-a80d-61e391f25163-kube-api-access-kpcpc\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.444478 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29dff398-e620-4558-854e-3e9fb13f1b25-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.447712 4835 scope.go:117] "RemoveContainer" containerID="363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.468246 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.480516 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.494890 4835 scope.go:117] "RemoveContainer" containerID="01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859" Feb 02 16:55:50 crc kubenswrapper[4835]: E0202 16:55:50.495551 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859\": container with ID starting with 01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859 not found: ID does not exist" containerID="01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.495670 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859"} err="failed to get container status \"01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859\": rpc error: code = NotFound desc = could not find container \"01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859\": container with ID starting with 01a17b740306b0fe47f941b4331273e9bd26ccfbd82362cc41a744e2c19a1859 not found: ID does not exist" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.495795 4835 scope.go:117] "RemoveContainer" containerID="a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.495734 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:55:50 crc kubenswrapper[4835]: E0202 16:55:50.496337 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9\": container with ID starting with a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9 not found: ID does not exist" containerID="a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.496384 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9"} err="failed to get container status \"a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9\": rpc error: code = NotFound desc = could not find container \"a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9\": container with ID starting with a7017c1af59e80e278513dcde31721d4e63f2a2ab0a5c6e08480b5f97b5d66c9 not found: ID does not exist" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.496425 4835 scope.go:117] "RemoveContainer" containerID="363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045" Feb 02 16:55:50 crc kubenswrapper[4835]: E0202 16:55:50.496718 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045\": container with ID starting with 363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045 not found: ID does not exist" containerID="363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.496842 4835 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045"} err="failed to get container status \"363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045\": rpc error: code = NotFound desc = could not find container \"363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045\": container with ID starting with 363ca570d0d837336480ba50eddbf1bd7af8d06b2ddeb0bf58ef2b6da2c49045 not found: ID does not exist" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545009 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-utilities\") pod \"5eecd945-3eb3-4384-9836-c1a65b49063f\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545064 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r76qs\" (UniqueName: \"kubernetes.io/projected/cd498040-6af5-4953-8b1c-ea3803ba1b2a-kube-api-access-r76qs\") pod \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545093 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69xjz\" (UniqueName: \"kubernetes.io/projected/5eecd945-3eb3-4384-9836-c1a65b49063f-kube-api-access-69xjz\") pod \"5eecd945-3eb3-4384-9836-c1a65b49063f\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545122 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-utilities\") pod \"4b92b257-9045-493d-9c64-0e3660e8513a\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545159 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fmd7\" (UniqueName: \"kubernetes.io/projected/4b92b257-9045-493d-9c64-0e3660e8513a-kube-api-access-6fmd7\") pod \"4b92b257-9045-493d-9c64-0e3660e8513a\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545180 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-catalog-content\") pod \"5eecd945-3eb3-4384-9836-c1a65b49063f\" (UID: \"5eecd945-3eb3-4384-9836-c1a65b49063f\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545205 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-utilities\") pod \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545228 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-catalog-content\") pod \"4b92b257-9045-493d-9c64-0e3660e8513a\" (UID: \"4b92b257-9045-493d-9c64-0e3660e8513a\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.545245 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-catalog-content\") pod \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\" (UID: \"cd498040-6af5-4953-8b1c-ea3803ba1b2a\") " Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.546699 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-utilities" (OuterVolumeSpecName: "utilities") pod "cd498040-6af5-4953-8b1c-ea3803ba1b2a" (UID: "cd498040-6af5-4953-8b1c-ea3803ba1b2a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.546813 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-utilities" (OuterVolumeSpecName: "utilities") pod "5eecd945-3eb3-4384-9836-c1a65b49063f" (UID: "5eecd945-3eb3-4384-9836-c1a65b49063f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.548988 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-utilities" (OuterVolumeSpecName: "utilities") pod "4b92b257-9045-493d-9c64-0e3660e8513a" (UID: "4b92b257-9045-493d-9c64-0e3660e8513a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.549480 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b92b257-9045-493d-9c64-0e3660e8513a-kube-api-access-6fmd7" (OuterVolumeSpecName: "kube-api-access-6fmd7") pod "4b92b257-9045-493d-9c64-0e3660e8513a" (UID: "4b92b257-9045-493d-9c64-0e3660e8513a"). InnerVolumeSpecName "kube-api-access-6fmd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.550462 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd498040-6af5-4953-8b1c-ea3803ba1b2a-kube-api-access-r76qs" (OuterVolumeSpecName: "kube-api-access-r76qs") pod "cd498040-6af5-4953-8b1c-ea3803ba1b2a" (UID: "cd498040-6af5-4953-8b1c-ea3803ba1b2a"). InnerVolumeSpecName "kube-api-access-r76qs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.552321 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eecd945-3eb3-4384-9836-c1a65b49063f-kube-api-access-69xjz" (OuterVolumeSpecName: "kube-api-access-69xjz") pod "5eecd945-3eb3-4384-9836-c1a65b49063f" (UID: "5eecd945-3eb3-4384-9836-c1a65b49063f"). InnerVolumeSpecName "kube-api-access-69xjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.573492 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b92b257-9045-493d-9c64-0e3660e8513a" (UID: "4b92b257-9045-493d-9c64-0e3660e8513a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.597904 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5eecd945-3eb3-4384-9836-c1a65b49063f" (UID: "5eecd945-3eb3-4384-9836-c1a65b49063f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.647100 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r76qs\" (UniqueName: \"kubernetes.io/projected/cd498040-6af5-4953-8b1c-ea3803ba1b2a-kube-api-access-r76qs\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.647526 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69xjz\" (UniqueName: \"kubernetes.io/projected/5eecd945-3eb3-4384-9836-c1a65b49063f-kube-api-access-69xjz\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.647616 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.647690 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.647751 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fmd7\" (UniqueName: \"kubernetes.io/projected/4b92b257-9045-493d-9c64-0e3660e8513a-kube-api-access-6fmd7\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.647810 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.647879 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b92b257-9045-493d-9c64-0e3660e8513a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.647943 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eecd945-3eb3-4384-9836-c1a65b49063f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.657494 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd498040-6af5-4953-8b1c-ea3803ba1b2a" (UID: "cd498040-6af5-4953-8b1c-ea3803ba1b2a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.706831 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5cbxv"] Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.715918 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5cbxv"] Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.747560 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lmjmd"] Feb 02 16:55:50 crc kubenswrapper[4835]: I0202 16:55:50.749232 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd498040-6af5-4953-8b1c-ea3803ba1b2a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.202385 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" path="/var/lib/kubelet/pods/29dff398-e620-4558-854e-3e9fb13f1b25/volumes" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272458 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4lbw6"] Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272808 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272848 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272862 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272869 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272875 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="extract-utilities" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272881 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="extract-utilities" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272889 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerName="extract-content" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272896 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerName="extract-content" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272925 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="extract-content" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272931 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="extract-content" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272941 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272947 4835 
state_mem.go:107] "Deleted CPUSet assignment" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272954 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" containerName="extract-utilities" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272960 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" containerName="extract-utilities" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272967 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.272973 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.272979 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="extract-utilities" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273004 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="extract-utilities" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.273015 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerName="extract-utilities" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273021 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerName="extract-utilities" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.273029 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="extract-content" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273034 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="extract-content" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.273045 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273050 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.273057 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273081 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: E0202 16:55:51.273090 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" containerName="extract-content" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273095 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" containerName="extract-content" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273205 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" Feb 02 16:55:51 crc 
kubenswrapper[4835]: I0202 16:55:51.273214 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273243 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273251 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273258 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dff398-e620-4558-854e-3e9fb13f1b25" containerName="registry-server" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273441 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" containerName="marketplace-operator" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.273962 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.277150 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.278764 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4lbw6"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.355962 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-catalog-content\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.356036 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nnzw\" (UniqueName: \"kubernetes.io/projected/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-kube-api-access-5nnzw\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.356139 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-utilities\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.386551 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w9l6m" event={"ID":"4b92b257-9045-493d-9c64-0e3660e8513a","Type":"ContainerDied","Data":"425afc8d3a1a2edd1234dcc683c8d11c2625455132006f7249efa56e634f628c"} Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.386627 4835 scope.go:117] "RemoveContainer" containerID="9d7290b15650e0224e45602dc53526d231b8653eb2eaad04effb640d21eecf5d" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.386750 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w9l6m" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.393208 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jq6s4" event={"ID":"5eecd945-3eb3-4384-9836-c1a65b49063f","Type":"ContainerDied","Data":"201240fd3393adbd5b80ea8fd9e7522edb3592122d19ac4311c4a8f368bfb740"} Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.393351 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jq6s4" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.398944 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5vz8" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.402645 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-56ncc" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.402663 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-56ncc" event={"ID":"cd498040-6af5-4953-8b1c-ea3803ba1b2a","Type":"ContainerDied","Data":"70d6d6c958a7269694d4f3f3c7e2aece40f4a5c0e96ef8536a90e9ac26dbacfb"} Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.406652 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" event={"ID":"624437d3-bcc0-40bc-bc25-d8876722dbc8","Type":"ContainerStarted","Data":"668837ab17455f22f4235b5b785b55de81f640aad292955a8c7369b6edc5ab8a"} Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.406723 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" event={"ID":"624437d3-bcc0-40bc-bc25-d8876722dbc8","Type":"ContainerStarted","Data":"9c93b36baf4cf507f686dc6de1283d4443d13c6be831a21e6295afda22d8a32c"} Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.407037 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.410989 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.419920 4835 scope.go:117] "RemoveContainer" containerID="bec2a76b8a9774c0badf633d0461214631e35effac3fd9eb5ccfb55598915b8c" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.422919 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w9l6m"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.430366 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w9l6m"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.439316 4835 scope.go:117] "RemoveContainer" containerID="88b375175c8d07d9509a593ea8e6e22ff0213a2aece018b34b14e50e4201da4c" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.456891 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-utilities\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.456981 4835 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-catalog-content\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.457035 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nnzw\" (UniqueName: \"kubernetes.io/projected/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-kube-api-access-5nnzw\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.458057 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-utilities\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.458575 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-catalog-content\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.459913 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5vz8"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.470410 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5vz8"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.482790 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-56ncc"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.482892 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-56ncc"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.484630 4835 scope.go:117] "RemoveContainer" containerID="30811e6f9bcb61a9d17661fefb50cb5dd8bb4fcb3648f82c284c2712ab21d3f4" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.494822 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nnzw\" (UniqueName: \"kubernetes.io/projected/a48095d7-2712-4ce3-ac7e-0fb66f641e9f-kube-api-access-5nnzw\") pod \"certified-operators-4lbw6\" (UID: \"a48095d7-2712-4ce3-ac7e-0fb66f641e9f\") " pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.497029 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jq6s4"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.501362 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jq6s4"] Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.506546 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-lmjmd" podStartSLOduration=2.506533157 podStartE2EDuration="2.506533157s" podCreationTimestamp="2026-02-02 16:55:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2026-02-02 16:55:51.504080506 +0000 UTC m=+343.125684596" watchObservedRunningTime="2026-02-02 16:55:51.506533157 +0000 UTC m=+343.128137247" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.521128 4835 scope.go:117] "RemoveContainer" containerID="db1c0b746df017128cec2795af3413c1971203c8b583d9383b34228f4bcb6af6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.545447 4835 scope.go:117] "RemoveContainer" containerID="7160b0ddc6c1e9d747f6356dc068f09629beb43d94e29368039bf99287224b3c" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.570889 4835 scope.go:117] "RemoveContainer" containerID="89c7081530301f1bc3271517c27bf9405ca5efcc7560c52443e1c05181c0f153" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.585586 4835 scope.go:117] "RemoveContainer" containerID="4b69cd929ce47f91a5ca980d55ebc62a9fab2ee4729abac24226b2d0cac8dea3" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.590567 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.600229 4835 scope.go:117] "RemoveContainer" containerID="2d39276d62ff338c21add11ebc8211f6b33f5d7b15835598fe73e02e70a76d62" Feb 02 16:55:51 crc kubenswrapper[4835]: I0202 16:55:51.784861 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4lbw6"] Feb 02 16:55:52 crc kubenswrapper[4835]: I0202 16:55:52.414604 4835 generic.go:334] "Generic (PLEG): container finished" podID="a48095d7-2712-4ce3-ac7e-0fb66f641e9f" containerID="04702472b2a9371a189e5ed02c6e4d0ece9d2787222341815e3139ad9f528d72" exitCode=0 Feb 02 16:55:52 crc kubenswrapper[4835]: I0202 16:55:52.414668 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4lbw6" event={"ID":"a48095d7-2712-4ce3-ac7e-0fb66f641e9f","Type":"ContainerDied","Data":"04702472b2a9371a189e5ed02c6e4d0ece9d2787222341815e3139ad9f528d72"} Feb 02 16:55:52 crc kubenswrapper[4835]: I0202 16:55:52.415013 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4lbw6" event={"ID":"a48095d7-2712-4ce3-ac7e-0fb66f641e9f","Type":"ContainerStarted","Data":"668a0b78870429d58bf850136e71594097dde7155871a574210a872f6e6b4cbb"} Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.066553 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4mqtb"] Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.067864 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.069628 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.078087 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4mqtb"] Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.081247 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vddjx\" (UniqueName: \"kubernetes.io/projected/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-kube-api-access-vddjx\") pod \"redhat-marketplace-4mqtb\" (UID: \"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.081316 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-utilities\") pod \"redhat-marketplace-4mqtb\" (UID: \"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.081362 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-catalog-content\") pod \"redhat-marketplace-4mqtb\" (UID: \"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.182071 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-utilities\") pod \"redhat-marketplace-4mqtb\" (UID: \"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.182154 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-catalog-content\") pod \"redhat-marketplace-4mqtb\" (UID: \"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.182201 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vddjx\" (UniqueName: \"kubernetes.io/projected/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-kube-api-access-vddjx\") pod \"redhat-marketplace-4mqtb\" (UID: \"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.182570 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-utilities\") pod \"redhat-marketplace-4mqtb\" (UID: \"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.182984 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-catalog-content\") pod \"redhat-marketplace-4mqtb\" (UID: 
\"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.200473 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bfd176d-b2cc-45f8-a80d-61e391f25163" path="/var/lib/kubelet/pods/1bfd176d-b2cc-45f8-a80d-61e391f25163/volumes" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.201706 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b92b257-9045-493d-9c64-0e3660e8513a" path="/var/lib/kubelet/pods/4b92b257-9045-493d-9c64-0e3660e8513a/volumes" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.202986 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5eecd945-3eb3-4384-9836-c1a65b49063f" path="/var/lib/kubelet/pods/5eecd945-3eb3-4384-9836-c1a65b49063f/volumes" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.205425 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd498040-6af5-4953-8b1c-ea3803ba1b2a" path="/var/lib/kubelet/pods/cd498040-6af5-4953-8b1c-ea3803ba1b2a/volumes" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.205849 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vddjx\" (UniqueName: \"kubernetes.io/projected/6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a-kube-api-access-vddjx\") pod \"redhat-marketplace-4mqtb\" (UID: \"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a\") " pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.405508 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.426524 4835 generic.go:334] "Generic (PLEG): container finished" podID="a48095d7-2712-4ce3-ac7e-0fb66f641e9f" containerID="c0c9f3e95650d29fb96175747a70275b2c3cc5ee48e973305ae05ee428d50b64" exitCode=0 Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.426589 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4lbw6" event={"ID":"a48095d7-2712-4ce3-ac7e-0fb66f641e9f","Type":"ContainerDied","Data":"c0c9f3e95650d29fb96175747a70275b2c3cc5ee48e973305ae05ee428d50b64"} Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.616894 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4mqtb"] Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.678252 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xshph"] Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.679185 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.681181 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.686719 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xshph"] Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.795155 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ca5614b-4fa3-4a18-b40f-64369990a74a-utilities\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.795218 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfn9d\" (UniqueName: \"kubernetes.io/projected/2ca5614b-4fa3-4a18-b40f-64369990a74a-kube-api-access-tfn9d\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.795429 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ca5614b-4fa3-4a18-b40f-64369990a74a-catalog-content\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.896662 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ca5614b-4fa3-4a18-b40f-64369990a74a-utilities\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.896723 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfn9d\" (UniqueName: \"kubernetes.io/projected/2ca5614b-4fa3-4a18-b40f-64369990a74a-kube-api-access-tfn9d\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.896777 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ca5614b-4fa3-4a18-b40f-64369990a74a-catalog-content\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.897361 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ca5614b-4fa3-4a18-b40f-64369990a74a-utilities\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.897393 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ca5614b-4fa3-4a18-b40f-64369990a74a-catalog-content\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " 
pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:53 crc kubenswrapper[4835]: I0202 16:55:53.922004 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfn9d\" (UniqueName: \"kubernetes.io/projected/2ca5614b-4fa3-4a18-b40f-64369990a74a-kube-api-access-tfn9d\") pod \"redhat-operators-xshph\" (UID: \"2ca5614b-4fa3-4a18-b40f-64369990a74a\") " pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:54 crc kubenswrapper[4835]: I0202 16:55:54.014890 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:55:54 crc kubenswrapper[4835]: I0202 16:55:54.432680 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xshph"] Feb 02 16:55:54 crc kubenswrapper[4835]: I0202 16:55:54.435686 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4lbw6" event={"ID":"a48095d7-2712-4ce3-ac7e-0fb66f641e9f","Type":"ContainerStarted","Data":"10f67b85745d7276abb6cec9a7b182829a4756a0ef6e84db9577db39bc14d41c"} Feb 02 16:55:54 crc kubenswrapper[4835]: I0202 16:55:54.439004 4835 generic.go:334] "Generic (PLEG): container finished" podID="6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a" containerID="9744527a7f81ee8a124daf23ab79595a1ae7cac9e660a6a80bb7fb2f6c4ba326" exitCode=0 Feb 02 16:55:54 crc kubenswrapper[4835]: I0202 16:55:54.439046 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4mqtb" event={"ID":"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a","Type":"ContainerDied","Data":"9744527a7f81ee8a124daf23ab79595a1ae7cac9e660a6a80bb7fb2f6c4ba326"} Feb 02 16:55:54 crc kubenswrapper[4835]: I0202 16:55:54.439077 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4mqtb" event={"ID":"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a","Type":"ContainerStarted","Data":"abed2710ea622f00455d761451ca3876cab3feb8b349b7d38bd4d344e33a10ff"} Feb 02 16:55:54 crc kubenswrapper[4835]: I0202 16:55:54.458654 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4lbw6" podStartSLOduration=2.025641178 podStartE2EDuration="3.458634347s" podCreationTimestamp="2026-02-02 16:55:51 +0000 UTC" firstStartedPulling="2026-02-02 16:55:52.417164331 +0000 UTC m=+344.038768411" lastFinishedPulling="2026-02-02 16:55:53.85015749 +0000 UTC m=+345.471761580" observedRunningTime="2026-02-02 16:55:54.455748454 +0000 UTC m=+346.077352534" watchObservedRunningTime="2026-02-02 16:55:54.458634347 +0000 UTC m=+346.080238437" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.450150 4835 generic.go:334] "Generic (PLEG): container finished" podID="6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a" containerID="4f39dc19a926474d2becd49b6b3d6f37ecd5269acd636b65336b1568687829f2" exitCode=0 Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.450251 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4mqtb" event={"ID":"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a","Type":"ContainerDied","Data":"4f39dc19a926474d2becd49b6b3d6f37ecd5269acd636b65336b1568687829f2"} Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.455380 4835 generic.go:334] "Generic (PLEG): container finished" podID="2ca5614b-4fa3-4a18-b40f-64369990a74a" containerID="4b91080bec87c5b0f56dcd9e95263ebc89bce3ffadd93de012710b85d15788ed" exitCode=0 Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.455565 
4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xshph" event={"ID":"2ca5614b-4fa3-4a18-b40f-64369990a74a","Type":"ContainerDied","Data":"4b91080bec87c5b0f56dcd9e95263ebc89bce3ffadd93de012710b85d15788ed"} Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.455622 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xshph" event={"ID":"2ca5614b-4fa3-4a18-b40f-64369990a74a","Type":"ContainerStarted","Data":"55f37f545cbaee0a794df6be42ef53f539c714df70bae2e8d1d558075e15df44"} Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.474971 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jssq8"] Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.476305 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.478010 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.482665 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jssq8"] Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.618890 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66552ba5-2809-433e-b245-bc22ace6c699-catalog-content\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.618997 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66552ba5-2809-433e-b245-bc22ace6c699-utilities\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.619104 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psd8x\" (UniqueName: \"kubernetes.io/projected/66552ba5-2809-433e-b245-bc22ace6c699-kube-api-access-psd8x\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.720236 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66552ba5-2809-433e-b245-bc22ace6c699-catalog-content\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.720317 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66552ba5-2809-433e-b245-bc22ace6c699-utilities\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.720351 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psd8x\" (UniqueName: 
\"kubernetes.io/projected/66552ba5-2809-433e-b245-bc22ace6c699-kube-api-access-psd8x\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.720884 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66552ba5-2809-433e-b245-bc22ace6c699-utilities\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.720977 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66552ba5-2809-433e-b245-bc22ace6c699-catalog-content\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.740162 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psd8x\" (UniqueName: \"kubernetes.io/projected/66552ba5-2809-433e-b245-bc22ace6c699-kube-api-access-psd8x\") pod \"community-operators-jssq8\" (UID: \"66552ba5-2809-433e-b245-bc22ace6c699\") " pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:55 crc kubenswrapper[4835]: I0202 16:55:55.790557 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:55:56 crc kubenswrapper[4835]: I0202 16:55:56.244700 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jssq8"] Feb 02 16:55:56 crc kubenswrapper[4835]: I0202 16:55:56.461718 4835 generic.go:334] "Generic (PLEG): container finished" podID="66552ba5-2809-433e-b245-bc22ace6c699" containerID="e29a893a08ae7f16e3680997cb796193b1381ea71382f8354ba6325333de6ece" exitCode=0 Feb 02 16:55:56 crc kubenswrapper[4835]: I0202 16:55:56.461783 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jssq8" event={"ID":"66552ba5-2809-433e-b245-bc22ace6c699","Type":"ContainerDied","Data":"e29a893a08ae7f16e3680997cb796193b1381ea71382f8354ba6325333de6ece"} Feb 02 16:55:56 crc kubenswrapper[4835]: I0202 16:55:56.461843 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jssq8" event={"ID":"66552ba5-2809-433e-b245-bc22ace6c699","Type":"ContainerStarted","Data":"fbd01bfed5146ace8788656932189d0cc90f13c2df19635c077fdd611c33163a"} Feb 02 16:55:56 crc kubenswrapper[4835]: I0202 16:55:56.464182 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4mqtb" event={"ID":"6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a","Type":"ContainerStarted","Data":"95197e481e50b6d78c1ebb4ae6d45ad42a17de03516cc318db65afed2f616455"} Feb 02 16:55:56 crc kubenswrapper[4835]: I0202 16:55:56.473619 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xshph" event={"ID":"2ca5614b-4fa3-4a18-b40f-64369990a74a","Type":"ContainerStarted","Data":"a8e1225cbf1fc594e72b6e0ed682595ce6e7b25763e44b8e606c15950e54b170"} Feb 02 16:55:56 crc kubenswrapper[4835]: I0202 16:55:56.507551 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4mqtb" podStartSLOduration=2.010126075 
podStartE2EDuration="3.507532478s" podCreationTimestamp="2026-02-02 16:55:53 +0000 UTC" firstStartedPulling="2026-02-02 16:55:54.443690855 +0000 UTC m=+346.065294945" lastFinishedPulling="2026-02-02 16:55:55.941097248 +0000 UTC m=+347.562701348" observedRunningTime="2026-02-02 16:55:56.505161839 +0000 UTC m=+348.126765919" watchObservedRunningTime="2026-02-02 16:55:56.507532478 +0000 UTC m=+348.129136558" Feb 02 16:55:57 crc kubenswrapper[4835]: I0202 16:55:57.480215 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jssq8" event={"ID":"66552ba5-2809-433e-b245-bc22ace6c699","Type":"ContainerStarted","Data":"6d708d85f7c92cdacf16fbfad39abaa6c85c8b54dda6017794eb0b81568786b3"} Feb 02 16:55:57 crc kubenswrapper[4835]: I0202 16:55:57.481791 4835 generic.go:334] "Generic (PLEG): container finished" podID="2ca5614b-4fa3-4a18-b40f-64369990a74a" containerID="a8e1225cbf1fc594e72b6e0ed682595ce6e7b25763e44b8e606c15950e54b170" exitCode=0 Feb 02 16:55:57 crc kubenswrapper[4835]: I0202 16:55:57.481963 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xshph" event={"ID":"2ca5614b-4fa3-4a18-b40f-64369990a74a","Type":"ContainerDied","Data":"a8e1225cbf1fc594e72b6e0ed682595ce6e7b25763e44b8e606c15950e54b170"} Feb 02 16:55:58 crc kubenswrapper[4835]: I0202 16:55:58.489437 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xshph" event={"ID":"2ca5614b-4fa3-4a18-b40f-64369990a74a","Type":"ContainerStarted","Data":"10773c3fab4b4babac62cfadb42b60f18cb9470354055e8561d1283bd3c5bc1b"} Feb 02 16:55:58 crc kubenswrapper[4835]: I0202 16:55:58.491531 4835 generic.go:334] "Generic (PLEG): container finished" podID="66552ba5-2809-433e-b245-bc22ace6c699" containerID="6d708d85f7c92cdacf16fbfad39abaa6c85c8b54dda6017794eb0b81568786b3" exitCode=0 Feb 02 16:55:58 crc kubenswrapper[4835]: I0202 16:55:58.491558 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jssq8" event={"ID":"66552ba5-2809-433e-b245-bc22ace6c699","Type":"ContainerDied","Data":"6d708d85f7c92cdacf16fbfad39abaa6c85c8b54dda6017794eb0b81568786b3"} Feb 02 16:55:58 crc kubenswrapper[4835]: I0202 16:55:58.523252 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xshph" podStartSLOduration=3.073551233 podStartE2EDuration="5.523233758s" podCreationTimestamp="2026-02-02 16:55:53 +0000 UTC" firstStartedPulling="2026-02-02 16:55:55.461223324 +0000 UTC m=+347.082827414" lastFinishedPulling="2026-02-02 16:55:57.910905859 +0000 UTC m=+349.532509939" observedRunningTime="2026-02-02 16:55:58.517640436 +0000 UTC m=+350.139244516" watchObservedRunningTime="2026-02-02 16:55:58.523233758 +0000 UTC m=+350.144837848" Feb 02 16:55:59 crc kubenswrapper[4835]: I0202 16:55:59.500300 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jssq8" event={"ID":"66552ba5-2809-433e-b245-bc22ace6c699","Type":"ContainerStarted","Data":"ba3170a450fac3e68ded63a0542be25d042e080a827a2bf47b6451824eb5265e"} Feb 02 16:56:01 crc kubenswrapper[4835]: I0202 16:56:01.591392 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:56:01 crc kubenswrapper[4835]: I0202 16:56:01.593047 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:56:01 
crc kubenswrapper[4835]: I0202 16:56:01.636521 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:56:01 crc kubenswrapper[4835]: I0202 16:56:01.659873 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jssq8" podStartSLOduration=4.219526405 podStartE2EDuration="6.65985395s" podCreationTimestamp="2026-02-02 16:55:55 +0000 UTC" firstStartedPulling="2026-02-02 16:55:56.463598485 +0000 UTC m=+348.085202565" lastFinishedPulling="2026-02-02 16:55:58.90392603 +0000 UTC m=+350.525530110" observedRunningTime="2026-02-02 16:55:59.519930924 +0000 UTC m=+351.141535004" watchObservedRunningTime="2026-02-02 16:56:01.65985395 +0000 UTC m=+353.281458030" Feb 02 16:56:02 crc kubenswrapper[4835]: I0202 16:56:02.557383 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4lbw6" Feb 02 16:56:03 crc kubenswrapper[4835]: I0202 16:56:03.405981 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:56:03 crc kubenswrapper[4835]: I0202 16:56:03.406304 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:56:03 crc kubenswrapper[4835]: I0202 16:56:03.448789 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:56:03 crc kubenswrapper[4835]: I0202 16:56:03.557641 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4mqtb" Feb 02 16:56:04 crc kubenswrapper[4835]: I0202 16:56:04.015094 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:56:04 crc kubenswrapper[4835]: I0202 16:56:04.015174 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:56:04 crc kubenswrapper[4835]: I0202 16:56:04.061537 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:56:04 crc kubenswrapper[4835]: I0202 16:56:04.568359 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xshph" Feb 02 16:56:05 crc kubenswrapper[4835]: I0202 16:56:05.790961 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:56:05 crc kubenswrapper[4835]: I0202 16:56:05.791334 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:56:05 crc kubenswrapper[4835]: I0202 16:56:05.837143 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:56:06 crc kubenswrapper[4835]: I0202 16:56:06.573670 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jssq8" Feb 02 16:56:14 crc kubenswrapper[4835]: I0202 16:56:14.870595 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 16:56:14 crc kubenswrapper[4835]: I0202 16:56:14.871177 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.374166 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" podUID="b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" containerName="registry" containerID="cri-o://0f8773226ce829e53ffa90f6677a610fb4607d3fb50ba119d06845dd2095ebd3" gracePeriod=30 Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.600442 4835 generic.go:334] "Generic (PLEG): container finished" podID="b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" containerID="0f8773226ce829e53ffa90f6677a610fb4607d3fb50ba119d06845dd2095ebd3" exitCode=0 Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.600617 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" event={"ID":"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1","Type":"ContainerDied","Data":"0f8773226ce829e53ffa90f6677a610fb4607d3fb50ba119d06845dd2095ebd3"} Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.704577 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.804587 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-tls\") pod \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.804692 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbt6z\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-kube-api-access-pbt6z\") pod \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.804809 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-ca-trust-extracted\") pod \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.804865 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-bound-sa-token\") pod \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.805106 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.805146 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-certificates\") pod \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.806095 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.806169 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.805219 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-trusted-ca\") pod \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.806302 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-installation-pull-secrets\") pod \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\" (UID: \"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1\") " Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.806607 4835 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.806640 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.814313 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.814549 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.814736 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-kube-api-access-pbt6z" (OuterVolumeSpecName: "kube-api-access-pbt6z") pod "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1"). InnerVolumeSpecName "kube-api-access-pbt6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.817703 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.818293 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.823043 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" (UID: "b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.907395 4835 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.907423 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbt6z\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-kube-api-access-pbt6z\") on node \"crc\" DevicePath \"\"" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.907434 4835 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.907443 4835 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 16:56:15 crc kubenswrapper[4835]: I0202 16:56:15.907451 4835 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 02 16:56:16 crc kubenswrapper[4835]: I0202 16:56:16.611702 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" event={"ID":"b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1","Type":"ContainerDied","Data":"78016984562ae2d46ff4893e9323de65048cc84d85e2fdd1cabf887b1244f42e"} Feb 02 16:56:16 crc kubenswrapper[4835]: I0202 16:56:16.611801 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-sfbf9" Feb 02 16:56:16 crc kubenswrapper[4835]: I0202 16:56:16.612297 4835 scope.go:117] "RemoveContainer" containerID="0f8773226ce829e53ffa90f6677a610fb4607d3fb50ba119d06845dd2095ebd3" Feb 02 16:56:16 crc kubenswrapper[4835]: I0202 16:56:16.652749 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sfbf9"] Feb 02 16:56:16 crc kubenswrapper[4835]: I0202 16:56:16.658029 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-sfbf9"] Feb 02 16:56:17 crc kubenswrapper[4835]: I0202 16:56:17.200319 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" path="/var/lib/kubelet/pods/b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1/volumes" Feb 02 16:56:44 crc kubenswrapper[4835]: I0202 16:56:44.870945 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 16:56:44 crc kubenswrapper[4835]: I0202 16:56:44.871490 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 16:57:14 crc kubenswrapper[4835]: I0202 16:57:14.869917 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 16:57:14 crc kubenswrapper[4835]: I0202 16:57:14.871469 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 16:57:14 crc kubenswrapper[4835]: I0202 16:57:14.871560 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 16:57:14 crc kubenswrapper[4835]: I0202 16:57:14.872182 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b71282d2471b88b91c03e4f9e85d7d1903f9682be4501b8d87dfc0ade7c2e31e"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 16:57:14 crc kubenswrapper[4835]: I0202 16:57:14.872246 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://b71282d2471b88b91c03e4f9e85d7d1903f9682be4501b8d87dfc0ade7c2e31e" gracePeriod=600 Feb 02 16:57:15 crc kubenswrapper[4835]: I0202 16:57:15.942037 4835 generic.go:334] "Generic (PLEG): container finished" 
podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="b71282d2471b88b91c03e4f9e85d7d1903f9682be4501b8d87dfc0ade7c2e31e" exitCode=0 Feb 02 16:57:15 crc kubenswrapper[4835]: I0202 16:57:15.942346 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"b71282d2471b88b91c03e4f9e85d7d1903f9682be4501b8d87dfc0ade7c2e31e"} Feb 02 16:57:15 crc kubenswrapper[4835]: I0202 16:57:15.942455 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"e0c6bc82781affe05cbe6c90c01b78ec721f1da56df0b87b02d404a5427cd6a8"} Feb 02 16:57:15 crc kubenswrapper[4835]: I0202 16:57:15.942499 4835 scope.go:117] "RemoveContainer" containerID="ec80298db1c7f702ff3a9992169eaccdd36234072b1afa4bb59df29d05ec8db8" Feb 02 16:59:44 crc kubenswrapper[4835]: I0202 16:59:44.870080 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 16:59:44 crc kubenswrapper[4835]: I0202 16:59:44.870913 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.181506 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5"] Feb 02 17:00:00 crc kubenswrapper[4835]: E0202 17:00:00.182178 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" containerName="registry" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.182191 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" containerName="registry" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.182306 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ba2804-15a2-4d2c-b0db-a2b5f24bf2a1" containerName="registry" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.182702 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.187563 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.189199 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.192973 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5"] Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.212767 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-config-volume\") pod \"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.212832 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx4qz\" (UniqueName: \"kubernetes.io/projected/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-kube-api-access-gx4qz\") pod \"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.212882 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-secret-volume\") pod \"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.314347 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx4qz\" (UniqueName: \"kubernetes.io/projected/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-kube-api-access-gx4qz\") pod \"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.314398 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-secret-volume\") pod \"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.314476 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-config-volume\") pod \"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.315239 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-config-volume\") pod 
\"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.322192 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-secret-volume\") pod \"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.332615 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx4qz\" (UniqueName: \"kubernetes.io/projected/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-kube-api-access-gx4qz\") pod \"collect-profiles-29500860-d8hf5\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.500108 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.918547 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5"] Feb 02 17:00:00 crc kubenswrapper[4835]: W0202 17:00:00.928135 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c032d81_fc19_4a2b_a6ae_fdf9ab979cc2.slice/crio-f25daf7f68cd9915d9f534b1eb5f62feaa6f5dfabe93c999614139076112cb59 WatchSource:0}: Error finding container f25daf7f68cd9915d9f534b1eb5f62feaa6f5dfabe93c999614139076112cb59: Status 404 returned error can't find the container with id f25daf7f68cd9915d9f534b1eb5f62feaa6f5dfabe93c999614139076112cb59 Feb 02 17:00:00 crc kubenswrapper[4835]: I0202 17:00:00.964121 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" event={"ID":"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2","Type":"ContainerStarted","Data":"f25daf7f68cd9915d9f534b1eb5f62feaa6f5dfabe93c999614139076112cb59"} Feb 02 17:00:01 crc kubenswrapper[4835]: I0202 17:00:01.973912 4835 generic.go:334] "Generic (PLEG): container finished" podID="4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2" containerID="ed4a7e5ef8c0975e1ef40e129f19928157c810d32a79cac717df18f6f4358aa5" exitCode=0 Feb 02 17:00:01 crc kubenswrapper[4835]: I0202 17:00:01.975335 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" event={"ID":"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2","Type":"ContainerDied","Data":"ed4a7e5ef8c0975e1ef40e129f19928157c810d32a79cac717df18f6f4358aa5"} Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.226134 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.264006 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx4qz\" (UniqueName: \"kubernetes.io/projected/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-kube-api-access-gx4qz\") pod \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.264056 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-config-volume\") pod \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.264103 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-secret-volume\") pod \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\" (UID: \"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2\") " Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.264952 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-config-volume" (OuterVolumeSpecName: "config-volume") pod "4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2" (UID: "4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.270167 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-kube-api-access-gx4qz" (OuterVolumeSpecName: "kube-api-access-gx4qz") pod "4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2" (UID: "4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2"). InnerVolumeSpecName "kube-api-access-gx4qz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.270915 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2" (UID: "4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.366140 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx4qz\" (UniqueName: \"kubernetes.io/projected/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-kube-api-access-gx4qz\") on node \"crc\" DevicePath \"\"" Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.366269 4835 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.366327 4835 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.989637 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" event={"ID":"4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2","Type":"ContainerDied","Data":"f25daf7f68cd9915d9f534b1eb5f62feaa6f5dfabe93c999614139076112cb59"} Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.989678 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f25daf7f68cd9915d9f534b1eb5f62feaa6f5dfabe93c999614139076112cb59" Feb 02 17:00:03 crc kubenswrapper[4835]: I0202 17:00:03.989736 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5" Feb 02 17:00:14 crc kubenswrapper[4835]: I0202 17:00:14.870752 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:00:14 crc kubenswrapper[4835]: I0202 17:00:14.871421 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:00:44 crc kubenswrapper[4835]: I0202 17:00:44.870798 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:00:44 crc kubenswrapper[4835]: I0202 17:00:44.871578 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:00:44 crc kubenswrapper[4835]: I0202 17:00:44.871633 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:00:44 crc kubenswrapper[4835]: I0202 17:00:44.872427 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"e0c6bc82781affe05cbe6c90c01b78ec721f1da56df0b87b02d404a5427cd6a8"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:00:44 crc kubenswrapper[4835]: I0202 17:00:44.872490 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://e0c6bc82781affe05cbe6c90c01b78ec721f1da56df0b87b02d404a5427cd6a8" gracePeriod=600 Feb 02 17:00:45 crc kubenswrapper[4835]: I0202 17:00:45.263198 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="e0c6bc82781affe05cbe6c90c01b78ec721f1da56df0b87b02d404a5427cd6a8" exitCode=0 Feb 02 17:00:45 crc kubenswrapper[4835]: I0202 17:00:45.263689 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"e0c6bc82781affe05cbe6c90c01b78ec721f1da56df0b87b02d404a5427cd6a8"} Feb 02 17:00:45 crc kubenswrapper[4835]: I0202 17:00:45.263731 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"e554e578a75a247804791314d623e05f6091a40930f6f9c01d754a6a53db79cc"} Feb 02 17:00:45 crc kubenswrapper[4835]: I0202 17:00:45.263750 4835 scope.go:117] "RemoveContainer" containerID="b71282d2471b88b91c03e4f9e85d7d1903f9682be4501b8d87dfc0ade7c2e31e" Feb 02 17:01:09 crc kubenswrapper[4835]: I0202 17:01:09.503519 4835 scope.go:117] "RemoveContainer" containerID="4ca5fb927166e1cc92551dfc5fa03c748cb4caa5cab1858effa03c083b110d4f" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.941681 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj"] Feb 02 17:01:25 crc kubenswrapper[4835]: E0202 17:01:25.942472 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2" containerName="collect-profiles" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.942487 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2" containerName="collect-profiles" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.942588 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2" containerName="collect-profiles" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.943002 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.945730 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-rvjmd"] Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.946602 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-858654f9db-rvjmd" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.946931 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.947046 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.947403 4835 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-g9j7p" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.948837 4835 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-6p52k" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.954730 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj"] Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.961159 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-jjmjw"] Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.961877 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.963936 4835 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-25fzp" Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.979446 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-rvjmd"] Feb 02 17:01:25 crc kubenswrapper[4835]: I0202 17:01:25.985181 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-jjmjw"] Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.095461 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xzlw\" (UniqueName: \"kubernetes.io/projected/1f888664-2f9b-4bd3-bef9-dd8b65a2ab93-kube-api-access-4xzlw\") pod \"cert-manager-webhook-687f57d79b-jjmjw\" (UID: \"1f888664-2f9b-4bd3-bef9-dd8b65a2ab93\") " pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.095521 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jjzg\" (UniqueName: \"kubernetes.io/projected/2d062425-7c9e-48fe-a566-bf101b0349cc-kube-api-access-8jjzg\") pod \"cert-manager-cainjector-cf98fcc89-h5kzj\" (UID: \"2d062425-7c9e-48fe-a566-bf101b0349cc\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.095546 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n75bz\" (UniqueName: \"kubernetes.io/projected/65887296-1b4f-40f4-80f1-9889e34070cc-kube-api-access-n75bz\") pod \"cert-manager-858654f9db-rvjmd\" (UID: \"65887296-1b4f-40f4-80f1-9889e34070cc\") " pod="cert-manager/cert-manager-858654f9db-rvjmd" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.196767 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jjzg\" (UniqueName: \"kubernetes.io/projected/2d062425-7c9e-48fe-a566-bf101b0349cc-kube-api-access-8jjzg\") pod \"cert-manager-cainjector-cf98fcc89-h5kzj\" (UID: \"2d062425-7c9e-48fe-a566-bf101b0349cc\") " 
pod="cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.196834 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n75bz\" (UniqueName: \"kubernetes.io/projected/65887296-1b4f-40f4-80f1-9889e34070cc-kube-api-access-n75bz\") pod \"cert-manager-858654f9db-rvjmd\" (UID: \"65887296-1b4f-40f4-80f1-9889e34070cc\") " pod="cert-manager/cert-manager-858654f9db-rvjmd" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.196922 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xzlw\" (UniqueName: \"kubernetes.io/projected/1f888664-2f9b-4bd3-bef9-dd8b65a2ab93-kube-api-access-4xzlw\") pod \"cert-manager-webhook-687f57d79b-jjmjw\" (UID: \"1f888664-2f9b-4bd3-bef9-dd8b65a2ab93\") " pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.214918 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jjzg\" (UniqueName: \"kubernetes.io/projected/2d062425-7c9e-48fe-a566-bf101b0349cc-kube-api-access-8jjzg\") pod \"cert-manager-cainjector-cf98fcc89-h5kzj\" (UID: \"2d062425-7c9e-48fe-a566-bf101b0349cc\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.214927 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n75bz\" (UniqueName: \"kubernetes.io/projected/65887296-1b4f-40f4-80f1-9889e34070cc-kube-api-access-n75bz\") pod \"cert-manager-858654f9db-rvjmd\" (UID: \"65887296-1b4f-40f4-80f1-9889e34070cc\") " pod="cert-manager/cert-manager-858654f9db-rvjmd" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.218075 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xzlw\" (UniqueName: \"kubernetes.io/projected/1f888664-2f9b-4bd3-bef9-dd8b65a2ab93-kube-api-access-4xzlw\") pod \"cert-manager-webhook-687f57d79b-jjmjw\" (UID: \"1f888664-2f9b-4bd3-bef9-dd8b65a2ab93\") " pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.260793 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.270612 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-rvjmd" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.282041 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.484360 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj"] Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.494507 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.535876 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj" event={"ID":"2d062425-7c9e-48fe-a566-bf101b0349cc","Type":"ContainerStarted","Data":"7b41121686a8980795e3b01308407d4d50270c4067c96d42e667292220e97d85"} Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.726246 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-rvjmd"] Feb 02 17:01:26 crc kubenswrapper[4835]: W0202 17:01:26.728952 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65887296_1b4f_40f4_80f1_9889e34070cc.slice/crio-89fe54c7def21febd2a4e6b4d29461f4779bad066f1aa0054e2ecddd53ef223c WatchSource:0}: Error finding container 89fe54c7def21febd2a4e6b4d29461f4779bad066f1aa0054e2ecddd53ef223c: Status 404 returned error can't find the container with id 89fe54c7def21febd2a4e6b4d29461f4779bad066f1aa0054e2ecddd53ef223c Feb 02 17:01:26 crc kubenswrapper[4835]: I0202 17:01:26.734558 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-jjmjw"] Feb 02 17:01:27 crc kubenswrapper[4835]: I0202 17:01:27.912331 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-rvjmd" event={"ID":"65887296-1b4f-40f4-80f1-9889e34070cc","Type":"ContainerStarted","Data":"89fe54c7def21febd2a4e6b4d29461f4779bad066f1aa0054e2ecddd53ef223c"} Feb 02 17:01:27 crc kubenswrapper[4835]: I0202 17:01:27.915720 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" event={"ID":"1f888664-2f9b-4bd3-bef9-dd8b65a2ab93","Type":"ContainerStarted","Data":"65395f8292255097c6fb9a6340594b1f960869b582929b6f53ca8b0a276ef645"} Feb 02 17:01:30 crc kubenswrapper[4835]: I0202 17:01:30.932882 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj" event={"ID":"2d062425-7c9e-48fe-a566-bf101b0349cc","Type":"ContainerStarted","Data":"32abb615586ceb8fd09628523b5c2951579e1ad8a7fc58d848f06c87ea72f5c3"} Feb 02 17:01:30 crc kubenswrapper[4835]: I0202 17:01:30.948991 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-h5kzj" podStartSLOduration=2.178785236 podStartE2EDuration="5.948968481s" podCreationTimestamp="2026-02-02 17:01:25 +0000 UTC" firstStartedPulling="2026-02-02 17:01:26.494233983 +0000 UTC m=+678.115838063" lastFinishedPulling="2026-02-02 17:01:30.264417218 +0000 UTC m=+681.886021308" observedRunningTime="2026-02-02 17:01:30.94788842 +0000 UTC m=+682.569492520" watchObservedRunningTime="2026-02-02 17:01:30.948968481 +0000 UTC m=+682.570572571" Feb 02 17:01:31 crc kubenswrapper[4835]: I0202 17:01:31.940238 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" 
event={"ID":"1f888664-2f9b-4bd3-bef9-dd8b65a2ab93","Type":"ContainerStarted","Data":"07a93ced5e8be66cb933ce87c23aa67d3967cab5b6e6b1d828ab1bd29a9d4012"} Feb 02 17:01:31 crc kubenswrapper[4835]: I0202 17:01:31.940593 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" Feb 02 17:01:31 crc kubenswrapper[4835]: I0202 17:01:31.943733 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-rvjmd" event={"ID":"65887296-1b4f-40f4-80f1-9889e34070cc","Type":"ContainerStarted","Data":"0a7b53dbeff78414c5253ff46e66db309457103c2d83f3b60f70f847db9969fd"} Feb 02 17:01:31 crc kubenswrapper[4835]: I0202 17:01:31.956961 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" podStartSLOduration=2.062731823 podStartE2EDuration="6.956942491s" podCreationTimestamp="2026-02-02 17:01:25 +0000 UTC" firstStartedPulling="2026-02-02 17:01:26.742372403 +0000 UTC m=+678.363976483" lastFinishedPulling="2026-02-02 17:01:31.636583071 +0000 UTC m=+683.258187151" observedRunningTime="2026-02-02 17:01:31.955202491 +0000 UTC m=+683.576806571" watchObservedRunningTime="2026-02-02 17:01:31.956942491 +0000 UTC m=+683.578546581" Feb 02 17:01:31 crc kubenswrapper[4835]: I0202 17:01:31.971126 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-rvjmd" podStartSLOduration=2.058611375 podStartE2EDuration="6.971107905s" podCreationTimestamp="2026-02-02 17:01:25 +0000 UTC" firstStartedPulling="2026-02-02 17:01:26.734024865 +0000 UTC m=+678.355628945" lastFinishedPulling="2026-02-02 17:01:31.646521395 +0000 UTC m=+683.268125475" observedRunningTime="2026-02-02 17:01:31.96915341 +0000 UTC m=+683.590757490" watchObservedRunningTime="2026-02-02 17:01:31.971107905 +0000 UTC m=+683.592711985" Feb 02 17:01:36 crc kubenswrapper[4835]: I0202 17:01:36.286370 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-jjmjw" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.328746 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-88n4w"] Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.329745 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovn-controller" containerID="cri-o://77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828" gracePeriod=30 Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.329899 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="northd" containerID="cri-o://39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811" gracePeriod=30 Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.329892 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="sbdb" containerID="cri-o://8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" gracePeriod=30 Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.329958 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" 
podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506" gracePeriod=30 Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.330006 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kube-rbac-proxy-node" containerID="cri-o://696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399" gracePeriod=30 Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.330052 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovn-acl-logging" containerID="cri-o://f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41" gracePeriod=30 Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.330225 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="nbdb" containerID="cri-o://133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" gracePeriod=30 Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.388014 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" containerID="cri-o://944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" gracePeriod=30 Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.640526 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a is running failed: container process not found" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.640559 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 is running failed: container process not found" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.641357 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a is running failed: container process not found" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" cmd=["/bin/bash","-c","set -xeo pipefail\n. 
/ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.641447 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 is running failed: container process not found" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.641665 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a is running failed: container process not found" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.641731 4835 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="sbdb" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.642233 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 is running failed: container process not found" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.642292 4835 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="nbdb" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.668717 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/3.log" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.673108 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovn-acl-logging/0.log" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.673868 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovn-controller/0.log" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.674714 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746545 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mxwh9"] Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746793 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="nbdb" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746809 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="nbdb" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746820 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovn-acl-logging" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746829 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovn-acl-logging" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746839 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kubecfg-setup" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746848 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kubecfg-setup" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746867 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746876 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746884 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovn-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746891 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovn-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746901 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="sbdb" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746909 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="sbdb" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746919 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746927 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746934 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746942 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746954 4835 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746961 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746970 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746977 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.746987 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kube-rbac-proxy-node" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.746995 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kube-rbac-proxy-node" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.747009 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="northd" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747016 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="northd" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747123 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovn-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747137 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kube-rbac-proxy-node" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747187 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747197 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747203 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747212 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747221 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="northd" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747230 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="sbdb" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747239 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="nbdb" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747248 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovn-acl-logging" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 
17:02:00.747255 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: E0202 17:02:00.747398 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747408 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.747528 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerName="ovnkube-controller" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.749350 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750712 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-bin\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750753 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-var-lib-openvswitch\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750782 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-systemd-units\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750823 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-slash\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750844 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-kubelet\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750850 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750875 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-env-overrides\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750895 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-slash" (OuterVolumeSpecName: "host-slash") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750898 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-config\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750947 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-etc-openvswitch\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.750975 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-node-log\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751003 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-script-lib\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751032 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-var-lib-cni-networks-ovn-kubernetes\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751058 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-ovn-kubernetes\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751086 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-log-socket\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751106 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-ovn\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751132 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-netd\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751154 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-systemd\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751179 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtvrr\" (UniqueName: \"kubernetes.io/projected/0cbaf0a8-c75d-4059-9874-d0a193090578-kube-api-access-xtvrr\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751197 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-openvswitch\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751221 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0cbaf0a8-c75d-4059-9874-d0a193090578-ovn-node-metrics-cert\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751243 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-netns\") pod \"0cbaf0a8-c75d-4059-9874-d0a193090578\" (UID: \"0cbaf0a8-c75d-4059-9874-d0a193090578\") " Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751392 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751402 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751437 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). 
InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751480 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751473 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-node-log" (OuterVolumeSpecName: "node-log") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751500 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751501 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751518 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751458 4835 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751751 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751790 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751818 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751888 4835 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-slash\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751877 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751932 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-log-socket" (OuterVolumeSpecName: "log-socket") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.751932 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.752245 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.757716 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbaf0a8-c75d-4059-9874-d0a193090578-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.758071 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cbaf0a8-c75d-4059-9874-d0a193090578-kube-api-access-xtvrr" (OuterVolumeSpecName: "kube-api-access-xtvrr") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "kube-api-access-xtvrr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.767438 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "0cbaf0a8-c75d-4059-9874-d0a193090578" (UID: "0cbaf0a8-c75d-4059-9874-d0a193090578"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.852815 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-var-lib-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.852881 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.852920 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.852954 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-cni-netd\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853040 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-slash\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853187 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovn-node-metrics-cert\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853352 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-cni-bin\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853489 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853651 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp7gk\" (UniqueName: \"kubernetes.io/projected/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-kube-api-access-vp7gk\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853724 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-env-overrides\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853783 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-node-log\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853831 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-systemd-units\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853879 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-etc-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853907 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovnkube-config\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.853960 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-run-netns\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854019 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovnkube-script-lib\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854044 
4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-kubelet\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854072 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-ovn\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854132 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-systemd\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854176 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-log-socket\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854320 4835 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-systemd\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854343 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtvrr\" (UniqueName: \"kubernetes.io/projected/0cbaf0a8-c75d-4059-9874-d0a193090578-kube-api-access-xtvrr\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854361 4835 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854381 4835 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0cbaf0a8-c75d-4059-9874-d0a193090578-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854400 4835 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-netns\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854419 4835 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-bin\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854436 4835 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-systemd-units\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854454 4835 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" 
(UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854471 4835 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854488 4835 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854505 4835 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854520 4835 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-node-log\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854536 4835 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0cbaf0a8-c75d-4059-9874-d0a193090578-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854554 4835 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854572 4835 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854589 4835 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854605 4835 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-log-socket\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.854621 4835 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0cbaf0a8-c75d-4059-9874-d0a193090578-host-cni-netd\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955347 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-run-netns\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955431 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovnkube-script-lib\") pod \"ovnkube-node-mxwh9\" (UID: 
\"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955470 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-kubelet\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955471 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-run-netns\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955511 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-ovn\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955582 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-ovn\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955586 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-kubelet\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955642 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-systemd\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955679 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-systemd\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955681 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-log-socket\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955714 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-log-socket\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955808 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-var-lib-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955858 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955883 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955924 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-cni-netd\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955947 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-slash\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.955971 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovn-node-metrics-cert\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956018 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-cni-bin\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956039 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956069 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp7gk\" (UniqueName: \"kubernetes.io/projected/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-kube-api-access-vp7gk\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956108 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-env-overrides\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956142 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-node-log\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956168 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-systemd-units\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956196 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovnkube-config\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956220 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-etc-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956344 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-etc-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956379 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-var-lib-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956391 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovnkube-script-lib\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956409 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956441 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-systemd-units\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956445 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-node-log\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956478 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-slash\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956477 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-run-openvswitch\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956482 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956523 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-cni-bin\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.956503 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-host-cni-netd\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.957241 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-env-overrides\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.957504 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovnkube-config\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.961649 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-ovn-node-metrics-cert\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:00 crc kubenswrapper[4835]: I0202 17:02:00.973817 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp7gk\" (UniqueName: \"kubernetes.io/projected/60a9a17a-4aea-4abd-9c8d-1c1f6c010379-kube-api-access-vp7gk\") pod \"ovnkube-node-mxwh9\" (UID: \"60a9a17a-4aea-4abd-9c8d-1c1f6c010379\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.068956 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.135128 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"c30b942bdc42b9f075315909cae9f4e19da10e5bf3784a039cc2efeb7ad89712"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.138039 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovnkube-controller/3.log" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.140936 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovn-acl-logging/0.log" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.141666 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-88n4w_0cbaf0a8-c75d-4059-9874-d0a193090578/ovn-controller/0.log" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142218 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" exitCode=0 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142303 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" exitCode=0 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142319 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" exitCode=0 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142306 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142377 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142394 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142407 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" 
event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142428 4835 scope.go:117] "RemoveContainer" containerID="944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142333 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811" exitCode=0 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142460 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506" exitCode=0 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142474 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399" exitCode=0 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142486 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41" exitCode=143 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142499 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cbaf0a8-c75d-4059-9874-d0a193090578" containerID="77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828" exitCode=143 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142480 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142602 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142650 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142672 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142688 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142697 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142707 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142715 4835 pod_container_deletor.go:114] "Failed to issue 
the request to remove container" containerID={"Type":"cri-o","ID":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142723 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142732 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142744 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142758 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142771 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142788 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142799 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142809 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142817 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142826 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142835 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142844 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142853 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142862 4835 pod_container_deletor.go:114] "Failed to issue 
the request to remove container" containerID={"Type":"cri-o","ID":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142872 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142885 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142901 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142913 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142922 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142932 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142942 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142951 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142961 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142971 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142980 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.142989 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143003 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-88n4w" event={"ID":"0cbaf0a8-c75d-4059-9874-d0a193090578","Type":"ContainerDied","Data":"6a640515cf696d9d65af8d25e723f5274c228cc2a2b9cb7f959bd22d68c5b853"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 
17:02:01.143019 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143031 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143041 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143051 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143060 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143069 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143079 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143088 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143098 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.143107 4835 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.145651 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/2.log" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.145999 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/1.log" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.146024 4835 generic.go:334] "Generic (PLEG): container finished" podID="92da4528-a699-45b1-aed0-d49a382bf0a1" containerID="561a1a9beb47443e82e9257c7fd897da040fa5f33b07d13929eb4206b7e50a75" exitCode=2 Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.146048 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hzst6" event={"ID":"92da4528-a699-45b1-aed0-d49a382bf0a1","Type":"ContainerDied","Data":"561a1a9beb47443e82e9257c7fd897da040fa5f33b07d13929eb4206b7e50a75"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.146068 4835 pod_container_deletor.go:114] "Failed to issue the request 
to remove container" containerID={"Type":"cri-o","ID":"372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab"} Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.146387 4835 scope.go:117] "RemoveContainer" containerID="561a1a9beb47443e82e9257c7fd897da040fa5f33b07d13929eb4206b7e50a75" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.146605 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-hzst6_openshift-multus(92da4528-a699-45b1-aed0-d49a382bf0a1)\"" pod="openshift-multus/multus-hzst6" podUID="92da4528-a699-45b1-aed0-d49a382bf0a1" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.173636 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.198331 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-88n4w"] Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.198782 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-88n4w"] Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.205909 4835 scope.go:117] "RemoveContainer" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.221373 4835 scope.go:117] "RemoveContainer" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.287048 4835 scope.go:117] "RemoveContainer" containerID="39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.311626 4835 scope.go:117] "RemoveContainer" containerID="c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.328847 4835 scope.go:117] "RemoveContainer" containerID="696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.340788 4835 scope.go:117] "RemoveContainer" containerID="f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.353917 4835 scope.go:117] "RemoveContainer" containerID="77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.370465 4835 scope.go:117] "RemoveContainer" containerID="c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.384235 4835 scope.go:117] "RemoveContainer" containerID="944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.384613 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": container with ID starting with 944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562 not found: ID does not exist" containerID="944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.384647 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} err="failed to get 
container status \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": rpc error: code = NotFound desc = could not find container \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": container with ID starting with 944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.384669 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.384981 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": container with ID starting with 3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289 not found: ID does not exist" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.385009 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} err="failed to get container status \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": rpc error: code = NotFound desc = could not find container \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": container with ID starting with 3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.385024 4835 scope.go:117] "RemoveContainer" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.385260 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": container with ID starting with 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a not found: ID does not exist" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.385299 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} err="failed to get container status \"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": rpc error: code = NotFound desc = could not find container \"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": container with ID starting with 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.385315 4835 scope.go:117] "RemoveContainer" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.385573 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": container with ID starting with 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 not found: ID does not exist" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.385601 4835 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} err="failed to get container status \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": rpc error: code = NotFound desc = could not find container \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": container with ID starting with 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.385614 4835 scope.go:117] "RemoveContainer" containerID="39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.385992 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": container with ID starting with 39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811 not found: ID does not exist" containerID="39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.386014 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} err="failed to get container status \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": rpc error: code = NotFound desc = could not find container \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": container with ID starting with 39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.386028 4835 scope.go:117] "RemoveContainer" containerID="c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.386415 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": container with ID starting with c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506 not found: ID does not exist" containerID="c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.386441 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} err="failed to get container status \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": rpc error: code = NotFound desc = could not find container \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": container with ID starting with c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.386460 4835 scope.go:117] "RemoveContainer" containerID="696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.386943 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": container with ID starting with 696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399 not found: ID does 
not exist" containerID="696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.386971 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} err="failed to get container status \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": rpc error: code = NotFound desc = could not find container \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": container with ID starting with 696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.386990 4835 scope.go:117] "RemoveContainer" containerID="f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.387291 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": container with ID starting with f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41 not found: ID does not exist" containerID="f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.387314 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} err="failed to get container status \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": rpc error: code = NotFound desc = could not find container \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": container with ID starting with f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.387328 4835 scope.go:117] "RemoveContainer" containerID="77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.387519 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": container with ID starting with 77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828 not found: ID does not exist" containerID="77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.387547 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} err="failed to get container status \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": rpc error: code = NotFound desc = could not find container \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": container with ID starting with 77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.387564 4835 scope.go:117] "RemoveContainer" containerID="c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8" Feb 02 17:02:01 crc kubenswrapper[4835]: E0202 17:02:01.387818 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": container with ID starting with c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8 not found: ID does not exist" containerID="c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.387840 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} err="failed to get container status \"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": rpc error: code = NotFound desc = could not find container \"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": container with ID starting with c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.387853 4835 scope.go:117] "RemoveContainer" containerID="944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.388070 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} err="failed to get container status \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": rpc error: code = NotFound desc = could not find container \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": container with ID starting with 944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.388091 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.388350 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} err="failed to get container status \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": rpc error: code = NotFound desc = could not find container \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": container with ID starting with 3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.388406 4835 scope.go:117] "RemoveContainer" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.388735 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} err="failed to get container status \"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": rpc error: code = NotFound desc = could not find container \"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": container with ID starting with 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.388761 4835 scope.go:117] "RemoveContainer" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.388976 4835 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} err="failed to get container status \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": rpc error: code = NotFound desc = could not find container \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": container with ID starting with 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.388998 4835 scope.go:117] "RemoveContainer" containerID="39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.389249 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} err="failed to get container status \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": rpc error: code = NotFound desc = could not find container \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": container with ID starting with 39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.389288 4835 scope.go:117] "RemoveContainer" containerID="c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.389492 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} err="failed to get container status \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": rpc error: code = NotFound desc = could not find container \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": container with ID starting with c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.389512 4835 scope.go:117] "RemoveContainer" containerID="696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.389816 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} err="failed to get container status \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": rpc error: code = NotFound desc = could not find container \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": container with ID starting with 696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.389842 4835 scope.go:117] "RemoveContainer" containerID="f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.390069 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} err="failed to get container status \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": rpc error: code = NotFound desc = could not find container \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": container with ID starting with f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41 not found: ID does not exist" Feb 
02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.390092 4835 scope.go:117] "RemoveContainer" containerID="77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.390356 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} err="failed to get container status \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": rpc error: code = NotFound desc = could not find container \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": container with ID starting with 77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.390380 4835 scope.go:117] "RemoveContainer" containerID="c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.390789 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} err="failed to get container status \"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": rpc error: code = NotFound desc = could not find container \"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": container with ID starting with c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.390821 4835 scope.go:117] "RemoveContainer" containerID="944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.391101 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} err="failed to get container status \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": rpc error: code = NotFound desc = could not find container \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": container with ID starting with 944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.391125 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.391381 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} err="failed to get container status \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": rpc error: code = NotFound desc = could not find container \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": container with ID starting with 3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.391404 4835 scope.go:117] "RemoveContainer" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.391610 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} err="failed to get container status 
\"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": rpc error: code = NotFound desc = could not find container \"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": container with ID starting with 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.391639 4835 scope.go:117] "RemoveContainer" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.391945 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} err="failed to get container status \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": rpc error: code = NotFound desc = could not find container \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": container with ID starting with 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.391976 4835 scope.go:117] "RemoveContainer" containerID="39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.392227 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} err="failed to get container status \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": rpc error: code = NotFound desc = could not find container \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": container with ID starting with 39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.392247 4835 scope.go:117] "RemoveContainer" containerID="c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.392533 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} err="failed to get container status \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": rpc error: code = NotFound desc = could not find container \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": container with ID starting with c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.392551 4835 scope.go:117] "RemoveContainer" containerID="696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.392803 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} err="failed to get container status \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": rpc error: code = NotFound desc = could not find container \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": container with ID starting with 696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.392830 4835 scope.go:117] "RemoveContainer" 
containerID="f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.393043 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} err="failed to get container status \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": rpc error: code = NotFound desc = could not find container \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": container with ID starting with f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.393061 4835 scope.go:117] "RemoveContainer" containerID="77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.393246 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} err="failed to get container status \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": rpc error: code = NotFound desc = could not find container \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": container with ID starting with 77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.393262 4835 scope.go:117] "RemoveContainer" containerID="c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.393543 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} err="failed to get container status \"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": rpc error: code = NotFound desc = could not find container \"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": container with ID starting with c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.393582 4835 scope.go:117] "RemoveContainer" containerID="944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.393832 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} err="failed to get container status \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": rpc error: code = NotFound desc = could not find container \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": container with ID starting with 944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.393857 4835 scope.go:117] "RemoveContainer" containerID="3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.394138 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289"} err="failed to get container status \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": rpc error: code = NotFound desc = could not find 
container \"3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289\": container with ID starting with 3468fd37e3237f77056e57a9c559075fb47298945b592ff43eff14b9a1d43289 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.394163 4835 scope.go:117] "RemoveContainer" containerID="8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.394368 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a"} err="failed to get container status \"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": rpc error: code = NotFound desc = could not find container \"8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a\": container with ID starting with 8752c5307b9fa717a1e17c1acff730dff9dc8da71ff3d41e7102660124858f1a not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.394395 4835 scope.go:117] "RemoveContainer" containerID="133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.394605 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1"} err="failed to get container status \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": rpc error: code = NotFound desc = could not find container \"133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1\": container with ID starting with 133f758aa751f81b3d57e0de31d0d0bc665507bdce7dcc68be8edc0ae3b393d1 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.394631 4835 scope.go:117] "RemoveContainer" containerID="39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.394895 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811"} err="failed to get container status \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": rpc error: code = NotFound desc = could not find container \"39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811\": container with ID starting with 39c8a5cc3154ce0087c183a0fec9f4756c888a7e8852b9049e85a0ccb1741811 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.394921 4835 scope.go:117] "RemoveContainer" containerID="c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.395145 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506"} err="failed to get container status \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": rpc error: code = NotFound desc = could not find container \"c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506\": container with ID starting with c73109cce0cb5160a2e87238f2ef48289e1796fbedf5f0271b5524e2c6356506 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.395167 4835 scope.go:117] "RemoveContainer" containerID="696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.395410 4835 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399"} err="failed to get container status \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": rpc error: code = NotFound desc = could not find container \"696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399\": container with ID starting with 696f3b212d7a3e5f31202132f2554cda30a33ad653ec301339232bc711129399 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.395435 4835 scope.go:117] "RemoveContainer" containerID="f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.395789 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41"} err="failed to get container status \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": rpc error: code = NotFound desc = could not find container \"f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41\": container with ID starting with f0305ee3bcdd6bd5dcf6f608250503fb96c31b99e0081fdb4c7c8c3e8896fc41 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.395808 4835 scope.go:117] "RemoveContainer" containerID="77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.396112 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828"} err="failed to get container status \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": rpc error: code = NotFound desc = could not find container \"77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828\": container with ID starting with 77bc751a836a7010adb8e7bc7731fad18a7200dc722157a55fc8b330cf9c6828 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.396130 4835 scope.go:117] "RemoveContainer" containerID="c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.396429 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8"} err="failed to get container status \"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": rpc error: code = NotFound desc = could not find container \"c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8\": container with ID starting with c5a7194854f3adbe03a504c64cad87e7249118c54f47968fed53f460a12016e8 not found: ID does not exist" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.396457 4835 scope.go:117] "RemoveContainer" containerID="944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562" Feb 02 17:02:01 crc kubenswrapper[4835]: I0202 17:02:01.396797 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562"} err="failed to get container status \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": rpc error: code = NotFound desc = could not find container \"944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562\": container with ID starting with 
944b321c5d9464c744bc079f844c3ee984fb2fa0221551f70cc20ae77a4c7562 not found: ID does not exist" Feb 02 17:02:02 crc kubenswrapper[4835]: I0202 17:02:02.153351 4835 generic.go:334] "Generic (PLEG): container finished" podID="60a9a17a-4aea-4abd-9c8d-1c1f6c010379" containerID="8d949252d4575ef94e776a3ff8bc3b5a5ac6af61d2cc39f88ce2c7087dc64e11" exitCode=0 Feb 02 17:02:02 crc kubenswrapper[4835]: I0202 17:02:02.153428 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerDied","Data":"8d949252d4575ef94e776a3ff8bc3b5a5ac6af61d2cc39f88ce2c7087dc64e11"} Feb 02 17:02:03 crc kubenswrapper[4835]: I0202 17:02:03.175790 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"c5cc221deba92a30d2ab787596bf093b1dd13ddfebee32ea490a2b1e21106eea"} Feb 02 17:02:03 crc kubenswrapper[4835]: I0202 17:02:03.176102 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"b2587ca64303365e959fd62d841741f53a11660618330ee60f35ab4592202d24"} Feb 02 17:02:03 crc kubenswrapper[4835]: I0202 17:02:03.176112 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"b6fbc725fd43ec216d00a0747a8a81d49c0ac49f5ef8f0c85f6605c681f3934f"} Feb 02 17:02:03 crc kubenswrapper[4835]: I0202 17:02:03.176124 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"38575af5c2df210c0d492685b10612b3bb9fb9aced620465a9eb58a0c46cd799"} Feb 02 17:02:03 crc kubenswrapper[4835]: I0202 17:02:03.176132 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"5c1a981f7360d6d9823e986a1f69340968f2359518f196b15c73477d84930f64"} Feb 02 17:02:03 crc kubenswrapper[4835]: I0202 17:02:03.176142 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"bacca794014e51b21a2772ed24ac59ba350664a20af80d4185c6c2f0521d5ca9"} Feb 02 17:02:03 crc kubenswrapper[4835]: I0202 17:02:03.196922 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cbaf0a8-c75d-4059-9874-d0a193090578" path="/var/lib/kubelet/pods/0cbaf0a8-c75d-4059-9874-d0a193090578/volumes" Feb 02 17:02:06 crc kubenswrapper[4835]: I0202 17:02:06.197880 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"165aa112f99a6138bd58d6b1758a24542e01a80960567471cb62af20670df0ea"} Feb 02 17:02:08 crc kubenswrapper[4835]: I0202 17:02:08.215858 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" event={"ID":"60a9a17a-4aea-4abd-9c8d-1c1f6c010379","Type":"ContainerStarted","Data":"c63d09669a13b05eea3e54f818dc29c02adf718f417c2429e2b65f04e8d9488f"} Feb 02 17:02:08 crc kubenswrapper[4835]: I0202 17:02:08.216507 4835 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:08 crc kubenswrapper[4835]: I0202 17:02:08.216537 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:08 crc kubenswrapper[4835]: I0202 17:02:08.216550 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:08 crc kubenswrapper[4835]: I0202 17:02:08.246081 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:08 crc kubenswrapper[4835]: I0202 17:02:08.248297 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" podStartSLOduration=8.248262591 podStartE2EDuration="8.248262591s" podCreationTimestamp="2026-02-02 17:02:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:02:08.240651504 +0000 UTC m=+719.862255604" watchObservedRunningTime="2026-02-02 17:02:08.248262591 +0000 UTC m=+719.869866671" Feb 02 17:02:08 crc kubenswrapper[4835]: I0202 17:02:08.252508 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:09 crc kubenswrapper[4835]: I0202 17:02:09.553764 4835 scope.go:117] "RemoveContainer" containerID="372941298fdf56b4df6c18c369051d2ed94b952a9af957014c6d1fb141a71fab" Feb 02 17:02:10 crc kubenswrapper[4835]: I0202 17:02:10.230452 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/2.log" Feb 02 17:02:12 crc kubenswrapper[4835]: I0202 17:02:12.188820 4835 scope.go:117] "RemoveContainer" containerID="561a1a9beb47443e82e9257c7fd897da040fa5f33b07d13929eb4206b7e50a75" Feb 02 17:02:12 crc kubenswrapper[4835]: E0202 17:02:12.189348 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-hzst6_openshift-multus(92da4528-a699-45b1-aed0-d49a382bf0a1)\"" pod="openshift-multus/multus-hzst6" podUID="92da4528-a699-45b1-aed0-d49a382bf0a1" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.348297 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc"] Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.349716 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.352931 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.358660 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc"] Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.431002 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.431063 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9jgm\" (UniqueName: \"kubernetes.io/projected/3d697674-78a8-4c19-96a7-5aea46402c5e-kube-api-access-c9jgm\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.431190 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.532759 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9jgm\" (UniqueName: \"kubernetes.io/projected/3d697674-78a8-4c19-96a7-5aea46402c5e-kube-api-access-c9jgm\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.532948 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.533046 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.533570 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.533752 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.557456 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9jgm\" (UniqueName: \"kubernetes.io/projected/3d697674-78a8-4c19-96a7-5aea46402c5e-kube-api-access-c9jgm\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: I0202 17:02:14.676917 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: E0202 17:02:14.703534 4835 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(340afb87781090d6eebf1e2dbe9cedf7885493ea70abdaa37abe77f366890f5b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 17:02:14 crc kubenswrapper[4835]: E0202 17:02:14.703638 4835 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(340afb87781090d6eebf1e2dbe9cedf7885493ea70abdaa37abe77f366890f5b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: E0202 17:02:14.703675 4835 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(340afb87781090d6eebf1e2dbe9cedf7885493ea70abdaa37abe77f366890f5b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:14 crc kubenswrapper[4835]: E0202 17:02:14.703753 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace(3d697674-78a8-4c19-96a7-5aea46402c5e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace(3d697674-78a8-4c19-96a7-5aea46402c5e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(340afb87781090d6eebf1e2dbe9cedf7885493ea70abdaa37abe77f366890f5b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" Feb 02 17:02:15 crc kubenswrapper[4835]: I0202 17:02:15.258292 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:15 crc kubenswrapper[4835]: I0202 17:02:15.259032 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:15 crc kubenswrapper[4835]: E0202 17:02:15.278692 4835 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(84cf9c8884fb9c13ac27f484143a4769f32c443187088b7b8e3b0cc8cabd14de): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 17:02:15 crc kubenswrapper[4835]: E0202 17:02:15.278758 4835 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(84cf9c8884fb9c13ac27f484143a4769f32c443187088b7b8e3b0cc8cabd14de): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:15 crc kubenswrapper[4835]: E0202 17:02:15.278786 4835 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(84cf9c8884fb9c13ac27f484143a4769f32c443187088b7b8e3b0cc8cabd14de): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:15 crc kubenswrapper[4835]: E0202 17:02:15.278840 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace(3d697674-78a8-4c19-96a7-5aea46402c5e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace(3d697674-78a8-4c19-96a7-5aea46402c5e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(84cf9c8884fb9c13ac27f484143a4769f32c443187088b7b8e3b0cc8cabd14de): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" Feb 02 17:02:27 crc kubenswrapper[4835]: I0202 17:02:27.188171 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:27 crc kubenswrapper[4835]: I0202 17:02:27.188832 4835 scope.go:117] "RemoveContainer" containerID="561a1a9beb47443e82e9257c7fd897da040fa5f33b07d13929eb4206b7e50a75" Feb 02 17:02:27 crc kubenswrapper[4835]: I0202 17:02:27.189684 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:27 crc kubenswrapper[4835]: E0202 17:02:27.219474 4835 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(8401431a8d545bde7d02110bdf1534c486a11deb8d71f075efcdc82099d20ba5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 17:02:27 crc kubenswrapper[4835]: E0202 17:02:27.219548 4835 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(8401431a8d545bde7d02110bdf1534c486a11deb8d71f075efcdc82099d20ba5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:27 crc kubenswrapper[4835]: E0202 17:02:27.219586 4835 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(8401431a8d545bde7d02110bdf1534c486a11deb8d71f075efcdc82099d20ba5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:27 crc kubenswrapper[4835]: E0202 17:02:27.219657 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace(3d697674-78a8-4c19-96a7-5aea46402c5e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace(3d697674-78a8-4c19-96a7-5aea46402c5e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_openshift-marketplace_3d697674-78a8-4c19-96a7-5aea46402c5e_0(8401431a8d545bde7d02110bdf1534c486a11deb8d71f075efcdc82099d20ba5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" Feb 02 17:02:28 crc kubenswrapper[4835]: I0202 17:02:28.344576 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hzst6_92da4528-a699-45b1-aed0-d49a382bf0a1/kube-multus/2.log" Feb 02 17:02:28 crc kubenswrapper[4835]: I0202 17:02:28.345551 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hzst6" event={"ID":"92da4528-a699-45b1-aed0-d49a382bf0a1","Type":"ContainerStarted","Data":"7e0b2497a87e90629a0896d7ca18d422f34e3bfa955328ac7b29cc98d343ccc6"} Feb 02 17:02:31 crc kubenswrapper[4835]: I0202 17:02:31.099104 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxwh9" Feb 02 17:02:38 crc kubenswrapper[4835]: I0202 17:02:38.938101 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wwcps"] Feb 02 17:02:38 crc kubenswrapper[4835]: I0202 17:02:38.939587 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:38 crc kubenswrapper[4835]: I0202 17:02:38.953818 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wwcps"] Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.094738 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-catalog-content\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.094789 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-utilities\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.094843 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlq7r\" (UniqueName: \"kubernetes.io/projected/585157d3-fd1c-4406-bc2a-ab635df9c689-kube-api-access-hlq7r\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.189532 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.190935 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.196478 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-catalog-content\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.196524 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-utilities\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.196550 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlq7r\" (UniqueName: \"kubernetes.io/projected/585157d3-fd1c-4406-bc2a-ab635df9c689-kube-api-access-hlq7r\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.197323 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-catalog-content\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.197598 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-utilities\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.229449 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlq7r\" (UniqueName: \"kubernetes.io/projected/585157d3-fd1c-4406-bc2a-ab635df9c689-kube-api-access-hlq7r\") pod \"certified-operators-wwcps\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.266908 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.461197 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc"] Feb 02 17:02:39 crc kubenswrapper[4835]: I0202 17:02:39.590778 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wwcps"] Feb 02 17:02:39 crc kubenswrapper[4835]: W0202 17:02:39.599384 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod585157d3_fd1c_4406_bc2a_ab635df9c689.slice/crio-ac6b500efae38a4652592e4a6cffde88dd7e320cfccab8c29178ab1939baebe0 WatchSource:0}: Error finding container ac6b500efae38a4652592e4a6cffde88dd7e320cfccab8c29178ab1939baebe0: Status 404 returned error can't find the container with id ac6b500efae38a4652592e4a6cffde88dd7e320cfccab8c29178ab1939baebe0 Feb 02 17:02:40 crc kubenswrapper[4835]: I0202 17:02:40.444380 4835 generic.go:334] "Generic (PLEG): container finished" podID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerID="901168431cf64f66066b23bbba206e7594ab6ff1e2b5f7b4afd97a5a795fbb49" exitCode=0 Feb 02 17:02:40 crc kubenswrapper[4835]: I0202 17:02:40.444863 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwcps" event={"ID":"585157d3-fd1c-4406-bc2a-ab635df9c689","Type":"ContainerDied","Data":"901168431cf64f66066b23bbba206e7594ab6ff1e2b5f7b4afd97a5a795fbb49"} Feb 02 17:02:40 crc kubenswrapper[4835]: I0202 17:02:40.444949 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwcps" event={"ID":"585157d3-fd1c-4406-bc2a-ab635df9c689","Type":"ContainerStarted","Data":"ac6b500efae38a4652592e4a6cffde88dd7e320cfccab8c29178ab1939baebe0"} Feb 02 17:02:40 crc kubenswrapper[4835]: I0202 17:02:40.450281 4835 generic.go:334] "Generic (PLEG): container finished" podID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerID="c5d7c86cc1a999c39aa858c74f25ec144c29ac7e1c2fb591cc3641f8af650d3f" exitCode=0 Feb 02 17:02:40 crc kubenswrapper[4835]: I0202 17:02:40.450349 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" event={"ID":"3d697674-78a8-4c19-96a7-5aea46402c5e","Type":"ContainerDied","Data":"c5d7c86cc1a999c39aa858c74f25ec144c29ac7e1c2fb591cc3641f8af650d3f"} Feb 02 17:02:40 crc kubenswrapper[4835]: I0202 17:02:40.450389 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" event={"ID":"3d697674-78a8-4c19-96a7-5aea46402c5e","Type":"ContainerStarted","Data":"e13ea930bff8f69daa7ee4272d9b1155393a7c1b407df6aa37b8d84478dd178a"} Feb 02 17:02:41 crc kubenswrapper[4835]: I0202 17:02:41.459466 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwcps" event={"ID":"585157d3-fd1c-4406-bc2a-ab635df9c689","Type":"ContainerStarted","Data":"c991f0e1da0a3bb45854e16410af2bed402f06d893bc88eb91441d6ee9c8b716"} Feb 02 17:02:42 crc kubenswrapper[4835]: I0202 17:02:42.469001 4835 generic.go:334] "Generic (PLEG): container finished" podID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerID="c991f0e1da0a3bb45854e16410af2bed402f06d893bc88eb91441d6ee9c8b716" exitCode=0 Feb 02 17:02:42 crc kubenswrapper[4835]: I0202 17:02:42.469075 4835 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwcps" event={"ID":"585157d3-fd1c-4406-bc2a-ab635df9c689","Type":"ContainerDied","Data":"c991f0e1da0a3bb45854e16410af2bed402f06d893bc88eb91441d6ee9c8b716"} Feb 02 17:02:42 crc kubenswrapper[4835]: I0202 17:02:42.472976 4835 generic.go:334] "Generic (PLEG): container finished" podID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerID="c924fadb05ebb42b129f02790f9a5f13b3125ff47d8af52728a55931a74e1f77" exitCode=0 Feb 02 17:02:42 crc kubenswrapper[4835]: I0202 17:02:42.473058 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" event={"ID":"3d697674-78a8-4c19-96a7-5aea46402c5e","Type":"ContainerDied","Data":"c924fadb05ebb42b129f02790f9a5f13b3125ff47d8af52728a55931a74e1f77"} Feb 02 17:02:43 crc kubenswrapper[4835]: I0202 17:02:43.483113 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwcps" event={"ID":"585157d3-fd1c-4406-bc2a-ab635df9c689","Type":"ContainerStarted","Data":"999224313fb7d93d0419ddbb03e47f5c0ced188abbb0677704a4605b5a149acb"} Feb 02 17:02:43 crc kubenswrapper[4835]: I0202 17:02:43.486520 4835 generic.go:334] "Generic (PLEG): container finished" podID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerID="9270de17fc30912e4aae8ba48fc59d68a369b53ddbf26391493d44dc7a6c77c6" exitCode=0 Feb 02 17:02:43 crc kubenswrapper[4835]: I0202 17:02:43.486569 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" event={"ID":"3d697674-78a8-4c19-96a7-5aea46402c5e","Type":"ContainerDied","Data":"9270de17fc30912e4aae8ba48fc59d68a369b53ddbf26391493d44dc7a6c77c6"} Feb 02 17:02:43 crc kubenswrapper[4835]: I0202 17:02:43.516255 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wwcps" podStartSLOduration=2.991153018 podStartE2EDuration="5.516230026s" podCreationTimestamp="2026-02-02 17:02:38 +0000 UTC" firstStartedPulling="2026-02-02 17:02:40.452917021 +0000 UTC m=+752.074521131" lastFinishedPulling="2026-02-02 17:02:42.977994029 +0000 UTC m=+754.599598139" observedRunningTime="2026-02-02 17:02:43.503633767 +0000 UTC m=+755.125237857" watchObservedRunningTime="2026-02-02 17:02:43.516230026 +0000 UTC m=+755.137834116" Feb 02 17:02:43 crc kubenswrapper[4835]: I0202 17:02:43.717108 4835 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.718910 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.879692 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-bundle\") pod \"3d697674-78a8-4c19-96a7-5aea46402c5e\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.879926 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-util\") pod \"3d697674-78a8-4c19-96a7-5aea46402c5e\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.880026 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9jgm\" (UniqueName: \"kubernetes.io/projected/3d697674-78a8-4c19-96a7-5aea46402c5e-kube-api-access-c9jgm\") pod \"3d697674-78a8-4c19-96a7-5aea46402c5e\" (UID: \"3d697674-78a8-4c19-96a7-5aea46402c5e\") " Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.880394 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-bundle" (OuterVolumeSpecName: "bundle") pod "3d697674-78a8-4c19-96a7-5aea46402c5e" (UID: "3d697674-78a8-4c19-96a7-5aea46402c5e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.886188 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d697674-78a8-4c19-96a7-5aea46402c5e-kube-api-access-c9jgm" (OuterVolumeSpecName: "kube-api-access-c9jgm") pod "3d697674-78a8-4c19-96a7-5aea46402c5e" (UID: "3d697674-78a8-4c19-96a7-5aea46402c5e"). InnerVolumeSpecName "kube-api-access-c9jgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.893896 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-util" (OuterVolumeSpecName: "util") pod "3d697674-78a8-4c19-96a7-5aea46402c5e" (UID: "3d697674-78a8-4c19-96a7-5aea46402c5e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.981537 4835 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-util\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.981609 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9jgm\" (UniqueName: \"kubernetes.io/projected/3d697674-78a8-4c19-96a7-5aea46402c5e-kube-api-access-c9jgm\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:44 crc kubenswrapper[4835]: I0202 17:02:44.981640 4835 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d697674-78a8-4c19-96a7-5aea46402c5e-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.124904 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fj5jf"] Feb 02 17:02:45 crc kubenswrapper[4835]: E0202 17:02:45.125167 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerName="util" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.125188 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerName="util" Feb 02 17:02:45 crc kubenswrapper[4835]: E0202 17:02:45.125207 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerName="extract" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.125215 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerName="extract" Feb 02 17:02:45 crc kubenswrapper[4835]: E0202 17:02:45.125236 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerName="pull" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.125243 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerName="pull" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.125374 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d697674-78a8-4c19-96a7-5aea46402c5e" containerName="extract" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.126261 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.143885 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fj5jf"] Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.285005 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-utilities\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.285079 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-catalog-content\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.285105 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lngt\" (UniqueName: \"kubernetes.io/projected/f145acf3-d001-4206-90f7-8b129d3bd97f-kube-api-access-7lngt\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.386859 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-utilities\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.386952 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-catalog-content\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.387012 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lngt\" (UniqueName: \"kubernetes.io/projected/f145acf3-d001-4206-90f7-8b129d3bd97f-kube-api-access-7lngt\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.387656 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-catalog-content\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.387729 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-utilities\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.417487 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7lngt\" (UniqueName: \"kubernetes.io/projected/f145acf3-d001-4206-90f7-8b129d3bd97f-kube-api-access-7lngt\") pod \"redhat-operators-fj5jf\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.443057 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.501608 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" event={"ID":"3d697674-78a8-4c19-96a7-5aea46402c5e","Type":"ContainerDied","Data":"e13ea930bff8f69daa7ee4272d9b1155393a7c1b407df6aa37b8d84478dd178a"} Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.501657 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e13ea930bff8f69daa7ee4272d9b1155393a7c1b407df6aa37b8d84478dd178a" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.501729 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc" Feb 02 17:02:45 crc kubenswrapper[4835]: I0202 17:02:45.645113 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fj5jf"] Feb 02 17:02:45 crc kubenswrapper[4835]: W0202 17:02:45.652826 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf145acf3_d001_4206_90f7_8b129d3bd97f.slice/crio-5448ed15da0a18969af9e115df5468d6952251865b2500a06201b7181b5fa90e WatchSource:0}: Error finding container 5448ed15da0a18969af9e115df5468d6952251865b2500a06201b7181b5fa90e: Status 404 returned error can't find the container with id 5448ed15da0a18969af9e115df5468d6952251865b2500a06201b7181b5fa90e Feb 02 17:02:46 crc kubenswrapper[4835]: I0202 17:02:46.515821 4835 generic.go:334] "Generic (PLEG): container finished" podID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerID="00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a" exitCode=0 Feb 02 17:02:46 crc kubenswrapper[4835]: I0202 17:02:46.515877 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj5jf" event={"ID":"f145acf3-d001-4206-90f7-8b129d3bd97f","Type":"ContainerDied","Data":"00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a"} Feb 02 17:02:46 crc kubenswrapper[4835]: I0202 17:02:46.516046 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj5jf" event={"ID":"f145acf3-d001-4206-90f7-8b129d3bd97f","Type":"ContainerStarted","Data":"5448ed15da0a18969af9e115df5468d6952251865b2500a06201b7181b5fa90e"} Feb 02 17:02:48 crc kubenswrapper[4835]: I0202 17:02:48.534057 4835 generic.go:334] "Generic (PLEG): container finished" podID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerID="5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e" exitCode=0 Feb 02 17:02:48 crc kubenswrapper[4835]: I0202 17:02:48.534161 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj5jf" event={"ID":"f145acf3-d001-4206-90f7-8b129d3bd97f","Type":"ContainerDied","Data":"5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e"} Feb 02 17:02:49 crc kubenswrapper[4835]: I0202 17:02:49.267153 4835 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:49 crc kubenswrapper[4835]: I0202 17:02:49.267206 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:49 crc kubenswrapper[4835]: I0202 17:02:49.309431 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:49 crc kubenswrapper[4835]: I0202 17:02:49.545937 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj5jf" event={"ID":"f145acf3-d001-4206-90f7-8b129d3bd97f","Type":"ContainerStarted","Data":"1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9"} Feb 02 17:02:49 crc kubenswrapper[4835]: I0202 17:02:49.570321 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fj5jf" podStartSLOduration=1.798537002 podStartE2EDuration="4.570298098s" podCreationTimestamp="2026-02-02 17:02:45 +0000 UTC" firstStartedPulling="2026-02-02 17:02:46.519336366 +0000 UTC m=+758.140940446" lastFinishedPulling="2026-02-02 17:02:49.291097462 +0000 UTC m=+760.912701542" observedRunningTime="2026-02-02 17:02:49.57002634 +0000 UTC m=+761.191630420" watchObservedRunningTime="2026-02-02 17:02:49.570298098 +0000 UTC m=+761.191902178" Feb 02 17:02:49 crc kubenswrapper[4835]: I0202 17:02:49.585497 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.318182 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-ck9dl"] Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.318947 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-ck9dl" Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.320829 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-pxsh4" Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.320923 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.327450 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.331540 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-ck9dl"] Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.454496 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc67j\" (UniqueName: \"kubernetes.io/projected/8abbe167-63ff-48da-ad70-f298a68aab19-kube-api-access-pc67j\") pod \"nmstate-operator-646758c888-ck9dl\" (UID: \"8abbe167-63ff-48da-ad70-f298a68aab19\") " pod="openshift-nmstate/nmstate-operator-646758c888-ck9dl" Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.555747 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc67j\" (UniqueName: \"kubernetes.io/projected/8abbe167-63ff-48da-ad70-f298a68aab19-kube-api-access-pc67j\") pod \"nmstate-operator-646758c888-ck9dl\" (UID: \"8abbe167-63ff-48da-ad70-f298a68aab19\") " pod="openshift-nmstate/nmstate-operator-646758c888-ck9dl" Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.576425 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc67j\" (UniqueName: \"kubernetes.io/projected/8abbe167-63ff-48da-ad70-f298a68aab19-kube-api-access-pc67j\") pod \"nmstate-operator-646758c888-ck9dl\" (UID: \"8abbe167-63ff-48da-ad70-f298a68aab19\") " pod="openshift-nmstate/nmstate-operator-646758c888-ck9dl" Feb 02 17:02:50 crc kubenswrapper[4835]: I0202 17:02:50.631053 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-ck9dl" Feb 02 17:02:51 crc kubenswrapper[4835]: I0202 17:02:51.073766 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-ck9dl"] Feb 02 17:02:51 crc kubenswrapper[4835]: I0202 17:02:51.557026 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-ck9dl" event={"ID":"8abbe167-63ff-48da-ad70-f298a68aab19","Type":"ContainerStarted","Data":"2f6f1080bc370c5b9bfb1e9be4928e98b6dd87130e5ba2738e2e44ebc89c99c3"} Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.313358 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wwcps"] Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.313666 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wwcps" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerName="registry-server" containerID="cri-o://999224313fb7d93d0419ddbb03e47f5c0ced188abbb0677704a4605b5a149acb" gracePeriod=2 Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.565977 4835 generic.go:334] "Generic (PLEG): container finished" podID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerID="999224313fb7d93d0419ddbb03e47f5c0ced188abbb0677704a4605b5a149acb" exitCode=0 Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.566046 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwcps" event={"ID":"585157d3-fd1c-4406-bc2a-ab635df9c689","Type":"ContainerDied","Data":"999224313fb7d93d0419ddbb03e47f5c0ced188abbb0677704a4605b5a149acb"} Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.688386 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.784701 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-utilities\") pod \"585157d3-fd1c-4406-bc2a-ab635df9c689\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.784878 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-catalog-content\") pod \"585157d3-fd1c-4406-bc2a-ab635df9c689\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.784951 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlq7r\" (UniqueName: \"kubernetes.io/projected/585157d3-fd1c-4406-bc2a-ab635df9c689-kube-api-access-hlq7r\") pod \"585157d3-fd1c-4406-bc2a-ab635df9c689\" (UID: \"585157d3-fd1c-4406-bc2a-ab635df9c689\") " Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.786655 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-utilities" (OuterVolumeSpecName: "utilities") pod "585157d3-fd1c-4406-bc2a-ab635df9c689" (UID: "585157d3-fd1c-4406-bc2a-ab635df9c689"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.791352 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/585157d3-fd1c-4406-bc2a-ab635df9c689-kube-api-access-hlq7r" (OuterVolumeSpecName: "kube-api-access-hlq7r") pod "585157d3-fd1c-4406-bc2a-ab635df9c689" (UID: "585157d3-fd1c-4406-bc2a-ab635df9c689"). InnerVolumeSpecName "kube-api-access-hlq7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.845742 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "585157d3-fd1c-4406-bc2a-ab635df9c689" (UID: "585157d3-fd1c-4406-bc2a-ab635df9c689"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.887307 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlq7r\" (UniqueName: \"kubernetes.io/projected/585157d3-fd1c-4406-bc2a-ab635df9c689-kube-api-access-hlq7r\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.887338 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:52 crc kubenswrapper[4835]: I0202 17:02:52.887349 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585157d3-fd1c-4406-bc2a-ab635df9c689-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:02:53 crc kubenswrapper[4835]: I0202 17:02:53.574750 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwcps" event={"ID":"585157d3-fd1c-4406-bc2a-ab635df9c689","Type":"ContainerDied","Data":"ac6b500efae38a4652592e4a6cffde88dd7e320cfccab8c29178ab1939baebe0"} Feb 02 17:02:53 crc kubenswrapper[4835]: I0202 17:02:53.574837 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wwcps" Feb 02 17:02:53 crc kubenswrapper[4835]: I0202 17:02:53.575083 4835 scope.go:117] "RemoveContainer" containerID="999224313fb7d93d0419ddbb03e47f5c0ced188abbb0677704a4605b5a149acb" Feb 02 17:02:53 crc kubenswrapper[4835]: I0202 17:02:53.592094 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wwcps"] Feb 02 17:02:53 crc kubenswrapper[4835]: I0202 17:02:53.599202 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wwcps"] Feb 02 17:02:53 crc kubenswrapper[4835]: I0202 17:02:53.601523 4835 scope.go:117] "RemoveContainer" containerID="c991f0e1da0a3bb45854e16410af2bed402f06d893bc88eb91441d6ee9c8b716" Feb 02 17:02:53 crc kubenswrapper[4835]: I0202 17:02:53.627498 4835 scope.go:117] "RemoveContainer" containerID="901168431cf64f66066b23bbba206e7594ab6ff1e2b5f7b4afd97a5a795fbb49" Feb 02 17:02:54 crc kubenswrapper[4835]: I0202 17:02:54.583016 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-ck9dl" event={"ID":"8abbe167-63ff-48da-ad70-f298a68aab19","Type":"ContainerStarted","Data":"be497f322be3afbd129ad359de73cc651d3cdc5ebe1b4defbcf57e3607cd479a"} Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.195206 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" path="/var/lib/kubelet/pods/585157d3-fd1c-4406-bc2a-ab635df9c689/volumes" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.443742 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.443796 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.480743 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-ck9dl" podStartSLOduration=3.225374546 podStartE2EDuration="5.480719596s" podCreationTimestamp="2026-02-02 17:02:50 +0000 UTC" firstStartedPulling="2026-02-02 17:02:51.099038982 +0000 UTC m=+762.720643062" lastFinishedPulling="2026-02-02 17:02:53.354384032 +0000 UTC m=+764.975988112" observedRunningTime="2026-02-02 17:02:54.604917958 +0000 UTC m=+766.226522058" watchObservedRunningTime="2026-02-02 17:02:55.480719596 +0000 UTC m=+767.102323676" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.481443 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-c5rjp"] Feb 02 17:02:55 crc kubenswrapper[4835]: E0202 17:02:55.481665 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerName="extract-utilities" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.481684 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerName="extract-utilities" Feb 02 17:02:55 crc kubenswrapper[4835]: E0202 17:02:55.481698 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerName="extract-content" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.481706 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerName="extract-content" Feb 02 17:02:55 crc 
kubenswrapper[4835]: E0202 17:02:55.481718 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerName="registry-server" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.481725 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerName="registry-server" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.481839 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="585157d3-fd1c-4406-bc2a-ab635df9c689" containerName="registry-server" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.482434 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.484880 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-bvfgn" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.499098 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5"] Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.499967 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.501444 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.509170 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-c5rjp"] Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.517635 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.524139 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-9bwnb"] Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.525024 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.546187 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5"] Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.619926 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldl8z\" (UniqueName: \"kubernetes.io/projected/9b89aa5a-f847-42b1-a763-3bdcbcde8158-kube-api-access-ldl8z\") pod \"nmstate-webhook-8474b5b9d8-44zt5\" (UID: \"9b89aa5a-f847-42b1-a763-3bdcbcde8158\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.619979 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-ovs-socket\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.620011 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrhd8\" (UniqueName: \"kubernetes.io/projected/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-kube-api-access-lrhd8\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.620036 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8szr7\" (UniqueName: \"kubernetes.io/projected/c615d857-c500-4fe2-b699-97a5d8ce3311-kube-api-access-8szr7\") pod \"nmstate-metrics-54757c584b-c5rjp\" (UID: \"c615d857-c500-4fe2-b699-97a5d8ce3311\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.620052 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9b89aa5a-f847-42b1-a763-3bdcbcde8158-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-44zt5\" (UID: \"9b89aa5a-f847-42b1-a763-3bdcbcde8158\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.620087 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-dbus-socket\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.620119 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-nmstate-lock\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.638130 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8"] Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.638889 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.646651 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-x2jnt" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.646878 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.647114 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.650732 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8"] Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.658469 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722358 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/3d3a7f96-7388-4e16-991c-6e99de2387dc-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722478 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldl8z\" (UniqueName: \"kubernetes.io/projected/9b89aa5a-f847-42b1-a763-3bdcbcde8158-kube-api-access-ldl8z\") pod \"nmstate-webhook-8474b5b9d8-44zt5\" (UID: \"9b89aa5a-f847-42b1-a763-3bdcbcde8158\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722516 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-ovs-socket\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722564 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrhd8\" (UniqueName: \"kubernetes.io/projected/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-kube-api-access-lrhd8\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722593 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8szr7\" (UniqueName: \"kubernetes.io/projected/c615d857-c500-4fe2-b699-97a5d8ce3311-kube-api-access-8szr7\") pod \"nmstate-metrics-54757c584b-c5rjp\" (UID: \"c615d857-c500-4fe2-b699-97a5d8ce3311\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722617 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9b89aa5a-f847-42b1-a763-3bdcbcde8158-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-44zt5\" (UID: \"9b89aa5a-f847-42b1-a763-3bdcbcde8158\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722642 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-dbus-socket\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722665 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/3d3a7f96-7388-4e16-991c-6e99de2387dc-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722718 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-nmstate-lock\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.722777 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf2s8\" (UniqueName: \"kubernetes.io/projected/3d3a7f96-7388-4e16-991c-6e99de2387dc-kube-api-access-xf2s8\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.723605 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-ovs-socket\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.725707 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-nmstate-lock\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.725829 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-dbus-socket\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.730839 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9b89aa5a-f847-42b1-a763-3bdcbcde8158-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-44zt5\" (UID: \"9b89aa5a-f847-42b1-a763-3bdcbcde8158\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.744165 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8szr7\" (UniqueName: \"kubernetes.io/projected/c615d857-c500-4fe2-b699-97a5d8ce3311-kube-api-access-8szr7\") pod \"nmstate-metrics-54757c584b-c5rjp\" (UID: \"c615d857-c500-4fe2-b699-97a5d8ce3311\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.744834 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-ldl8z\" (UniqueName: \"kubernetes.io/projected/9b89aa5a-f847-42b1-a763-3bdcbcde8158-kube-api-access-ldl8z\") pod \"nmstate-webhook-8474b5b9d8-44zt5\" (UID: \"9b89aa5a-f847-42b1-a763-3bdcbcde8158\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.754829 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrhd8\" (UniqueName: \"kubernetes.io/projected/b5dbad86-74ef-402c-b0ab-5b48d69e8ecc-kube-api-access-lrhd8\") pod \"nmstate-handler-9bwnb\" (UID: \"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc\") " pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.798850 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.818860 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.829266 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/3d3a7f96-7388-4e16-991c-6e99de2387dc-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.829425 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf2s8\" (UniqueName: \"kubernetes.io/projected/3d3a7f96-7388-4e16-991c-6e99de2387dc-kube-api-access-xf2s8\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.829460 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/3d3a7f96-7388-4e16-991c-6e99de2387dc-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.830739 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/3d3a7f96-7388-4e16-991c-6e99de2387dc-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.835573 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/3d3a7f96-7388-4e16-991c-6e99de2387dc-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.840695 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.848309 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-895d8658-2wphp"] Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.849107 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.853960 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf2s8\" (UniqueName: \"kubernetes.io/projected/3d3a7f96-7388-4e16-991c-6e99de2387dc-kube-api-access-xf2s8\") pod \"nmstate-console-plugin-7754f76f8b-r4xs8\" (UID: \"3d3a7f96-7388-4e16-991c-6e99de2387dc\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.885310 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-895d8658-2wphp"] Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.931330 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/98072f8d-69ce-4ca3-8745-95bfe268c741-console-serving-cert\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.931402 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-service-ca\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.931565 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-oauth-serving-cert\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.931599 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-trusted-ca-bundle\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.931638 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjf52\" (UniqueName: \"kubernetes.io/projected/98072f8d-69ce-4ca3-8745-95bfe268c741-kube-api-access-xjf52\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.931854 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-console-config\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.931905 4835 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/98072f8d-69ce-4ca3-8745-95bfe268c741-console-oauth-config\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:55 crc kubenswrapper[4835]: I0202 17:02:55.963479 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.033973 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/98072f8d-69ce-4ca3-8745-95bfe268c741-console-oauth-config\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.034039 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/98072f8d-69ce-4ca3-8745-95bfe268c741-console-serving-cert\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.034070 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-service-ca\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.034096 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-oauth-serving-cert\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.034115 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-trusted-ca-bundle\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.034142 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjf52\" (UniqueName: \"kubernetes.io/projected/98072f8d-69ce-4ca3-8745-95bfe268c741-kube-api-access-xjf52\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.034173 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-console-config\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.035165 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-console-config\") pod 
\"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.035742 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-oauth-serving-cert\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.036375 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-service-ca\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.037205 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/98072f8d-69ce-4ca3-8745-95bfe268c741-trusted-ca-bundle\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.040856 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/98072f8d-69ce-4ca3-8745-95bfe268c741-console-serving-cert\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.043330 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/98072f8d-69ce-4ca3-8745-95bfe268c741-console-oauth-config\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.059877 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjf52\" (UniqueName: \"kubernetes.io/projected/98072f8d-69ce-4ca3-8745-95bfe268c741-kube-api-access-xjf52\") pod \"console-895d8658-2wphp\" (UID: \"98072f8d-69ce-4ca3-8745-95bfe268c741\") " pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.180222 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8"] Feb 02 17:02:56 crc kubenswrapper[4835]: W0202 17:02:56.186096 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d3a7f96_7388_4e16_991c_6e99de2387dc.slice/crio-ba0c55c5109c1aa9d575781c356c50182f57c5dd4c6e79e596a823c91fd8daa3 WatchSource:0}: Error finding container ba0c55c5109c1aa9d575781c356c50182f57c5dd4c6e79e596a823c91fd8daa3: Status 404 returned error can't find the container with id ba0c55c5109c1aa9d575781c356c50182f57c5dd4c6e79e596a823c91fd8daa3 Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.190301 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-895d8658-2wphp" Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.297635 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-c5rjp"] Feb 02 17:02:56 crc kubenswrapper[4835]: W0202 17:02:56.308000 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc615d857_c500_4fe2_b699_97a5d8ce3311.slice/crio-ebe53f9076d6356cf2317532ba3d368ea929935660ee8e52d5c61a3c3c0b37f6 WatchSource:0}: Error finding container ebe53f9076d6356cf2317532ba3d368ea929935660ee8e52d5c61a3c3c0b37f6: Status 404 returned error can't find the container with id ebe53f9076d6356cf2317532ba3d368ea929935660ee8e52d5c61a3c3c0b37f6 Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.339028 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5"] Feb 02 17:02:56 crc kubenswrapper[4835]: W0202 17:02:56.347036 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b89aa5a_f847_42b1_a763_3bdcbcde8158.slice/crio-9c3c97431545817053c8b93523403965c8f79788fb00198baa1bbbae010d286d WatchSource:0}: Error finding container 9c3c97431545817053c8b93523403965c8f79788fb00198baa1bbbae010d286d: Status 404 returned error can't find the container with id 9c3c97431545817053c8b93523403965c8f79788fb00198baa1bbbae010d286d Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.375939 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-895d8658-2wphp"] Feb 02 17:02:56 crc kubenswrapper[4835]: W0202 17:02:56.382758 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98072f8d_69ce_4ca3_8745_95bfe268c741.slice/crio-6de233dc1cf65b35e0d93107dea05c67b23d164e40dd3baac4571a5fb290122c WatchSource:0}: Error finding container 6de233dc1cf65b35e0d93107dea05c67b23d164e40dd3baac4571a5fb290122c: Status 404 returned error can't find the container with id 6de233dc1cf65b35e0d93107dea05c67b23d164e40dd3baac4571a5fb290122c Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.604177 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" event={"ID":"3d3a7f96-7388-4e16-991c-6e99de2387dc","Type":"ContainerStarted","Data":"ba0c55c5109c1aa9d575781c356c50182f57c5dd4c6e79e596a823c91fd8daa3"} Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.604923 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" event={"ID":"9b89aa5a-f847-42b1-a763-3bdcbcde8158","Type":"ContainerStarted","Data":"9c3c97431545817053c8b93523403965c8f79788fb00198baa1bbbae010d286d"} Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.606011 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" event={"ID":"c615d857-c500-4fe2-b699-97a5d8ce3311","Type":"ContainerStarted","Data":"ebe53f9076d6356cf2317532ba3d368ea929935660ee8e52d5c61a3c3c0b37f6"} Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.607694 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-9bwnb" event={"ID":"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc","Type":"ContainerStarted","Data":"f9908f62a325531f5acf144a8560932c8a1f39f757a7d03a018f26733e30e07f"} Feb 02 17:02:56 crc kubenswrapper[4835]: 
I0202 17:02:56.609171 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-895d8658-2wphp" event={"ID":"98072f8d-69ce-4ca3-8745-95bfe268c741","Type":"ContainerStarted","Data":"6a639ff5d6cf859752ce27005898a3cfb43830ad3665d17ecea3b9a01a0a4721"} Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.609253 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-895d8658-2wphp" event={"ID":"98072f8d-69ce-4ca3-8745-95bfe268c741","Type":"ContainerStarted","Data":"6de233dc1cf65b35e0d93107dea05c67b23d164e40dd3baac4571a5fb290122c"} Feb 02 17:02:56 crc kubenswrapper[4835]: I0202 17:02:56.637648 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-895d8658-2wphp" podStartSLOduration=1.637622814 podStartE2EDuration="1.637622814s" podCreationTimestamp="2026-02-02 17:02:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:02:56.634970078 +0000 UTC m=+768.256574158" watchObservedRunningTime="2026-02-02 17:02:56.637622814 +0000 UTC m=+768.259226934" Feb 02 17:02:58 crc kubenswrapper[4835]: I0202 17:02:58.526401 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fj5jf"] Feb 02 17:02:58 crc kubenswrapper[4835]: I0202 17:02:58.526850 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fj5jf" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerName="registry-server" containerID="cri-o://1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9" gracePeriod=2 Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.628076 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" event={"ID":"c615d857-c500-4fe2-b699-97a5d8ce3311","Type":"ContainerStarted","Data":"3f134af702d8df09a102be9076b4665a1c8c7782990e40f78b1a89a1a2983b9a"} Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.630551 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" event={"ID":"3d3a7f96-7388-4e16-991c-6e99de2387dc","Type":"ContainerStarted","Data":"9f31620a25fb73b465f41531d217debea50ba38395bfc94ebf68f06e62cfb345"} Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.635139 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" event={"ID":"9b89aa5a-f847-42b1-a763-3bdcbcde8158","Type":"ContainerStarted","Data":"2996c0216dd23bda2db15a203937600e39133da0a144af9ef94f3b4f753d9acd"} Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.635238 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.638753 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-9bwnb" event={"ID":"b5dbad86-74ef-402c-b0ab-5b48d69e8ecc","Type":"ContainerStarted","Data":"cd40fdf671aec13f70c385a0a8f97c9a6d1077d1c4fdecd53df14a1f8f5bfb75"} Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.638908 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.647901 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r4xs8" podStartSLOduration=2.268650727 podStartE2EDuration="4.647880657s" podCreationTimestamp="2026-02-02 17:02:55 +0000 UTC" firstStartedPulling="2026-02-02 17:02:56.188197047 +0000 UTC m=+767.809801127" lastFinishedPulling="2026-02-02 17:02:58.567426977 +0000 UTC m=+770.189031057" observedRunningTime="2026-02-02 17:02:59.647137135 +0000 UTC m=+771.268741225" watchObservedRunningTime="2026-02-02 17:02:59.647880657 +0000 UTC m=+771.269484737" Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.668671 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-9bwnb" podStartSLOduration=2.011754746 podStartE2EDuration="4.668644768s" podCreationTimestamp="2026-02-02 17:02:55 +0000 UTC" firstStartedPulling="2026-02-02 17:02:55.909472214 +0000 UTC m=+767.531076294" lastFinishedPulling="2026-02-02 17:02:58.566362236 +0000 UTC m=+770.187966316" observedRunningTime="2026-02-02 17:02:59.664159871 +0000 UTC m=+771.285763951" watchObservedRunningTime="2026-02-02 17:02:59.668644768 +0000 UTC m=+771.290248848" Feb 02 17:02:59 crc kubenswrapper[4835]: I0202 17:02:59.682317 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" podStartSLOduration=2.459149615 podStartE2EDuration="4.682262676s" podCreationTimestamp="2026-02-02 17:02:55 +0000 UTC" firstStartedPulling="2026-02-02 17:02:56.350998266 +0000 UTC m=+767.972602346" lastFinishedPulling="2026-02-02 17:02:58.574111327 +0000 UTC m=+770.195715407" observedRunningTime="2026-02-02 17:02:59.677994785 +0000 UTC m=+771.299598865" watchObservedRunningTime="2026-02-02 17:02:59.682262676 +0000 UTC m=+771.303866756" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.319580 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.389971 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lngt\" (UniqueName: \"kubernetes.io/projected/f145acf3-d001-4206-90f7-8b129d3bd97f-kube-api-access-7lngt\") pod \"f145acf3-d001-4206-90f7-8b129d3bd97f\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.390020 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-utilities\") pod \"f145acf3-d001-4206-90f7-8b129d3bd97f\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.390119 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-catalog-content\") pod \"f145acf3-d001-4206-90f7-8b129d3bd97f\" (UID: \"f145acf3-d001-4206-90f7-8b129d3bd97f\") " Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.391091 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-utilities" (OuterVolumeSpecName: "utilities") pod "f145acf3-d001-4206-90f7-8b129d3bd97f" (UID: "f145acf3-d001-4206-90f7-8b129d3bd97f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.394840 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f145acf3-d001-4206-90f7-8b129d3bd97f-kube-api-access-7lngt" (OuterVolumeSpecName: "kube-api-access-7lngt") pod "f145acf3-d001-4206-90f7-8b129d3bd97f" (UID: "f145acf3-d001-4206-90f7-8b129d3bd97f"). InnerVolumeSpecName "kube-api-access-7lngt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.491233 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lngt\" (UniqueName: \"kubernetes.io/projected/f145acf3-d001-4206-90f7-8b129d3bd97f-kube-api-access-7lngt\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.491352 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.514621 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f145acf3-d001-4206-90f7-8b129d3bd97f" (UID: "f145acf3-d001-4206-90f7-8b129d3bd97f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.592537 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f145acf3-d001-4206-90f7-8b129d3bd97f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.647192 4835 generic.go:334] "Generic (PLEG): container finished" podID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerID="1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9" exitCode=0 Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.647285 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fj5jf" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.647351 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj5jf" event={"ID":"f145acf3-d001-4206-90f7-8b129d3bd97f","Type":"ContainerDied","Data":"1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9"} Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.647416 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fj5jf" event={"ID":"f145acf3-d001-4206-90f7-8b129d3bd97f","Type":"ContainerDied","Data":"5448ed15da0a18969af9e115df5468d6952251865b2500a06201b7181b5fa90e"} Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.647442 4835 scope.go:117] "RemoveContainer" containerID="1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.666697 4835 scope.go:117] "RemoveContainer" containerID="5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.683341 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fj5jf"] Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.683467 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fj5jf"] Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.700205 4835 scope.go:117] "RemoveContainer" containerID="00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.721616 4835 scope.go:117] "RemoveContainer" containerID="1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9" Feb 02 17:03:00 crc kubenswrapper[4835]: E0202 17:03:00.722037 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9\": container with ID starting with 1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9 not found: ID does not exist" containerID="1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.722064 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9"} err="failed to get container status \"1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9\": rpc error: code = NotFound desc = could not find container \"1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9\": container with ID starting with 1a41bc6e633a55543639b1bf9d33c8cae047866238a90cf8ea2756d8d4e3a9f9 not found: ID does not exist" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.722087 4835 scope.go:117] "RemoveContainer" containerID="5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e" Feb 02 17:03:00 crc kubenswrapper[4835]: E0202 17:03:00.722644 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e\": container with ID starting with 5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e not found: ID does not exist" containerID="5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.722660 4835 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e"} err="failed to get container status \"5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e\": rpc error: code = NotFound desc = could not find container \"5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e\": container with ID starting with 5f3b10d30e098f2da862dce9285c2be5ef36648edaeef17aec0fff34ba453e8e not found: ID does not exist" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.722672 4835 scope.go:117] "RemoveContainer" containerID="00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a" Feb 02 17:03:00 crc kubenswrapper[4835]: E0202 17:03:00.722923 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a\": container with ID starting with 00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a not found: ID does not exist" containerID="00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a" Feb 02 17:03:00 crc kubenswrapper[4835]: I0202 17:03:00.722941 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a"} err="failed to get container status \"00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a\": rpc error: code = NotFound desc = could not find container \"00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a\": container with ID starting with 00f50bd2f8e324b87116ad00c76ebcb34753fa037fb340f3d02d978af55b7b2a not found: ID does not exist" Feb 02 17:03:01 crc kubenswrapper[4835]: I0202 17:03:01.198300 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" path="/var/lib/kubelet/pods/f145acf3-d001-4206-90f7-8b129d3bd97f/volumes" Feb 02 17:03:01 crc kubenswrapper[4835]: I0202 17:03:01.658362 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" event={"ID":"c615d857-c500-4fe2-b699-97a5d8ce3311","Type":"ContainerStarted","Data":"987b2ddd384db61f954ae6d3fd9b4cfee843ca6d6d7b39d435eae6417b7c8fff"} Feb 02 17:03:01 crc kubenswrapper[4835]: I0202 17:03:01.679269 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-c5rjp" podStartSLOduration=1.673395513 podStartE2EDuration="6.679250804s" podCreationTimestamp="2026-02-02 17:02:55 +0000 UTC" firstStartedPulling="2026-02-02 17:02:56.311406438 +0000 UTC m=+767.933010528" lastFinishedPulling="2026-02-02 17:03:01.317261739 +0000 UTC m=+772.938865819" observedRunningTime="2026-02-02 17:03:01.676702781 +0000 UTC m=+773.298306851" watchObservedRunningTime="2026-02-02 17:03:01.679250804 +0000 UTC m=+773.300854884" Feb 02 17:03:05 crc kubenswrapper[4835]: I0202 17:03:05.860395 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-9bwnb" Feb 02 17:03:06 crc kubenswrapper[4835]: I0202 17:03:06.191260 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-895d8658-2wphp" Feb 02 17:03:06 crc kubenswrapper[4835]: I0202 17:03:06.191340 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-895d8658-2wphp" Feb 02 17:03:06 crc 
kubenswrapper[4835]: I0202 17:03:06.197645 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-895d8658-2wphp" Feb 02 17:03:06 crc kubenswrapper[4835]: I0202 17:03:06.700197 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-895d8658-2wphp" Feb 02 17:03:06 crc kubenswrapper[4835]: I0202 17:03:06.770241 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xpn8c"] Feb 02 17:03:14 crc kubenswrapper[4835]: I0202 17:03:14.870586 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:03:14 crc kubenswrapper[4835]: I0202 17:03:14.871421 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:03:15 crc kubenswrapper[4835]: I0202 17:03:15.825973 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-44zt5" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.004123 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll"] Feb 02 17:03:29 crc kubenswrapper[4835]: E0202 17:03:29.004856 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerName="extract-utilities" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.004869 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerName="extract-utilities" Feb 02 17:03:29 crc kubenswrapper[4835]: E0202 17:03:29.004879 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerName="extract-content" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.004885 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerName="extract-content" Feb 02 17:03:29 crc kubenswrapper[4835]: E0202 17:03:29.004893 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerName="registry-server" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.004901 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerName="registry-server" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.004989 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f145acf3-d001-4206-90f7-8b129d3bd97f" containerName="registry-server" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.005931 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.007624 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.013127 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll"] Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.074807 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.074873 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4jnm\" (UniqueName: \"kubernetes.io/projected/695aef87-f06b-45ea-a3c1-aadf175760b5-kube-api-access-m4jnm\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.074891 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.176336 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.176426 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4jnm\" (UniqueName: \"kubernetes.io/projected/695aef87-f06b-45ea-a3c1-aadf175760b5-kube-api-access-m4jnm\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.176530 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.176867 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.177038 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.194879 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4jnm\" (UniqueName: \"kubernetes.io/projected/695aef87-f06b-45ea-a3c1-aadf175760b5-kube-api-access-m4jnm\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.336070 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.552890 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll"] Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.836581 4835 generic.go:334] "Generic (PLEG): container finished" podID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerID="4797e81ff06988d0a40851e207484ef9bc5fbb842413f48dc551032ad4beb55d" exitCode=0 Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.836637 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" event={"ID":"695aef87-f06b-45ea-a3c1-aadf175760b5","Type":"ContainerDied","Data":"4797e81ff06988d0a40851e207484ef9bc5fbb842413f48dc551032ad4beb55d"} Feb 02 17:03:29 crc kubenswrapper[4835]: I0202 17:03:29.836896 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" event={"ID":"695aef87-f06b-45ea-a3c1-aadf175760b5","Type":"ContainerStarted","Data":"b608fd0224213a647f8c337a347fd622cdd2475f930027ff2670978e603957f3"} Feb 02 17:03:31 crc kubenswrapper[4835]: I0202 17:03:31.816648 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-xpn8c" podUID="aad2f2e8-6800-4238-a0ab-ee3304bad4c1" containerName="console" containerID="cri-o://44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee" gracePeriod=15 Feb 02 17:03:31 crc kubenswrapper[4835]: I0202 17:03:31.890861 4835 generic.go:334] "Generic (PLEG): container finished" podID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerID="b746447ec12353438e70a8354a7c1d7b3c03f9dc55997caddd8d318c9fa775b2" exitCode=0 Feb 02 17:03:31 crc kubenswrapper[4835]: I0202 17:03:31.890907 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" 
event={"ID":"695aef87-f06b-45ea-a3c1-aadf175760b5","Type":"ContainerDied","Data":"b746447ec12353438e70a8354a7c1d7b3c03f9dc55997caddd8d318c9fa775b2"} Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.257661 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xpn8c_aad2f2e8-6800-4238-a0ab-ee3304bad4c1/console/0.log" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.258024 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.320919 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-serving-cert\") pod \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.320977 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-oauth-config\") pod \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.321052 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-config\") pod \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.321140 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-trusted-ca-bundle\") pod \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.321225 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w725d\" (UniqueName: \"kubernetes.io/projected/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-kube-api-access-w725d\") pod \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.321851 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-config" (OuterVolumeSpecName: "console-config") pod "aad2f2e8-6800-4238-a0ab-ee3304bad4c1" (UID: "aad2f2e8-6800-4238-a0ab-ee3304bad4c1"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.321887 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "aad2f2e8-6800-4238-a0ab-ee3304bad4c1" (UID: "aad2f2e8-6800-4238-a0ab-ee3304bad4c1"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.322068 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-service-ca\") pod \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.322121 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-oauth-serving-cert\") pod \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\" (UID: \"aad2f2e8-6800-4238-a0ab-ee3304bad4c1\") " Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.322410 4835 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.322430 4835 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.322503 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-service-ca" (OuterVolumeSpecName: "service-ca") pod "aad2f2e8-6800-4238-a0ab-ee3304bad4c1" (UID: "aad2f2e8-6800-4238-a0ab-ee3304bad4c1"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.322714 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "aad2f2e8-6800-4238-a0ab-ee3304bad4c1" (UID: "aad2f2e8-6800-4238-a0ab-ee3304bad4c1"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.327035 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-kube-api-access-w725d" (OuterVolumeSpecName: "kube-api-access-w725d") pod "aad2f2e8-6800-4238-a0ab-ee3304bad4c1" (UID: "aad2f2e8-6800-4238-a0ab-ee3304bad4c1"). InnerVolumeSpecName "kube-api-access-w725d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.327339 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "aad2f2e8-6800-4238-a0ab-ee3304bad4c1" (UID: "aad2f2e8-6800-4238-a0ab-ee3304bad4c1"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.328241 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "aad2f2e8-6800-4238-a0ab-ee3304bad4c1" (UID: "aad2f2e8-6800-4238-a0ab-ee3304bad4c1"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.424122 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w725d\" (UniqueName: \"kubernetes.io/projected/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-kube-api-access-w725d\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.424178 4835 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.424194 4835 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.424209 4835 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.424221 4835 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/aad2f2e8-6800-4238-a0ab-ee3304bad4c1-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.902387 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xpn8c_aad2f2e8-6800-4238-a0ab-ee3304bad4c1/console/0.log" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.902446 4835 generic.go:334] "Generic (PLEG): container finished" podID="aad2f2e8-6800-4238-a0ab-ee3304bad4c1" containerID="44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee" exitCode=2 Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.902536 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xpn8c" event={"ID":"aad2f2e8-6800-4238-a0ab-ee3304bad4c1","Type":"ContainerDied","Data":"44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee"} Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.902569 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xpn8c" event={"ID":"aad2f2e8-6800-4238-a0ab-ee3304bad4c1","Type":"ContainerDied","Data":"ee39f44671c742e7633f8f609bfb27f3126ad58c2a3c1a76b3cb45d562ef9031"} Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.902566 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xpn8c" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.902591 4835 scope.go:117] "RemoveContainer" containerID="44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.908471 4835 generic.go:334] "Generic (PLEG): container finished" podID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerID="93d6a153bcd9ff11d3b0409ff4f6045c59601578da2cfb287e1dcc3787f4c2c4" exitCode=0 Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.908535 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" event={"ID":"695aef87-f06b-45ea-a3c1-aadf175760b5","Type":"ContainerDied","Data":"93d6a153bcd9ff11d3b0409ff4f6045c59601578da2cfb287e1dcc3787f4c2c4"} Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.929641 4835 scope.go:117] "RemoveContainer" containerID="44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee" Feb 02 17:03:32 crc kubenswrapper[4835]: E0202 17:03:32.930022 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee\": container with ID starting with 44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee not found: ID does not exist" containerID="44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.930070 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee"} err="failed to get container status \"44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee\": rpc error: code = NotFound desc = could not find container \"44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee\": container with ID starting with 44984e6e63e056beb7cafbde8b52867b7e2f8485b1acdb23833ecb282f6851ee not found: ID does not exist" Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.952896 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xpn8c"] Feb 02 17:03:32 crc kubenswrapper[4835]: I0202 17:03:32.958489 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-xpn8c"] Feb 02 17:03:33 crc kubenswrapper[4835]: I0202 17:03:33.198626 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aad2f2e8-6800-4238-a0ab-ee3304bad4c1" path="/var/lib/kubelet/pods/aad2f2e8-6800-4238-a0ab-ee3304bad4c1/volumes" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.148331 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.273124 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-bundle\") pod \"695aef87-f06b-45ea-a3c1-aadf175760b5\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.273199 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-util\") pod \"695aef87-f06b-45ea-a3c1-aadf175760b5\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.273333 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4jnm\" (UniqueName: \"kubernetes.io/projected/695aef87-f06b-45ea-a3c1-aadf175760b5-kube-api-access-m4jnm\") pod \"695aef87-f06b-45ea-a3c1-aadf175760b5\" (UID: \"695aef87-f06b-45ea-a3c1-aadf175760b5\") " Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.274955 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-bundle" (OuterVolumeSpecName: "bundle") pod "695aef87-f06b-45ea-a3c1-aadf175760b5" (UID: "695aef87-f06b-45ea-a3c1-aadf175760b5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.280898 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/695aef87-f06b-45ea-a3c1-aadf175760b5-kube-api-access-m4jnm" (OuterVolumeSpecName: "kube-api-access-m4jnm") pod "695aef87-f06b-45ea-a3c1-aadf175760b5" (UID: "695aef87-f06b-45ea-a3c1-aadf175760b5"). InnerVolumeSpecName "kube-api-access-m4jnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.293574 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-util" (OuterVolumeSpecName: "util") pod "695aef87-f06b-45ea-a3c1-aadf175760b5" (UID: "695aef87-f06b-45ea-a3c1-aadf175760b5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.374879 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4jnm\" (UniqueName: \"kubernetes.io/projected/695aef87-f06b-45ea-a3c1-aadf175760b5-kube-api-access-m4jnm\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.374940 4835 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.374958 4835 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/695aef87-f06b-45ea-a3c1-aadf175760b5-util\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.931916 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" event={"ID":"695aef87-f06b-45ea-a3c1-aadf175760b5","Type":"ContainerDied","Data":"b608fd0224213a647f8c337a347fd622cdd2475f930027ff2670978e603957f3"} Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.932354 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b608fd0224213a647f8c337a347fd622cdd2475f930027ff2670978e603957f3" Feb 02 17:03:34 crc kubenswrapper[4835]: I0202 17:03:34.931966 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.741630 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gn7rm"] Feb 02 17:03:39 crc kubenswrapper[4835]: E0202 17:03:39.742441 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad2f2e8-6800-4238-a0ab-ee3304bad4c1" containerName="console" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.742459 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad2f2e8-6800-4238-a0ab-ee3304bad4c1" containerName="console" Feb 02 17:03:39 crc kubenswrapper[4835]: E0202 17:03:39.742482 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerName="pull" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.742490 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerName="pull" Feb 02 17:03:39 crc kubenswrapper[4835]: E0202 17:03:39.742499 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerName="extract" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.742506 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerName="extract" Feb 02 17:03:39 crc kubenswrapper[4835]: E0202 17:03:39.742519 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerName="util" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.742527 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerName="util" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.742651 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="695aef87-f06b-45ea-a3c1-aadf175760b5" containerName="extract" Feb 02 17:03:39 crc 
kubenswrapper[4835]: I0202 17:03:39.742672 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="aad2f2e8-6800-4238-a0ab-ee3304bad4c1" containerName="console" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.743461 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.764362 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn7rm"] Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.841944 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-catalog-content\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.841978 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lk7l\" (UniqueName: \"kubernetes.io/projected/3d99def3-2324-49f9-88d8-23f6ee20db3a-kube-api-access-8lk7l\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.841999 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-utilities\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.943726 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-catalog-content\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.943773 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lk7l\" (UniqueName: \"kubernetes.io/projected/3d99def3-2324-49f9-88d8-23f6ee20db3a-kube-api-access-8lk7l\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.943795 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-utilities\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.944361 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-utilities\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.944619 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-catalog-content\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:39 crc kubenswrapper[4835]: I0202 17:03:39.963453 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lk7l\" (UniqueName: \"kubernetes.io/projected/3d99def3-2324-49f9-88d8-23f6ee20db3a-kube-api-access-8lk7l\") pod \"redhat-marketplace-gn7rm\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:40 crc kubenswrapper[4835]: I0202 17:03:40.064049 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:40 crc kubenswrapper[4835]: I0202 17:03:40.501751 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn7rm"] Feb 02 17:03:40 crc kubenswrapper[4835]: I0202 17:03:40.976671 4835 generic.go:334] "Generic (PLEG): container finished" podID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerID="c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd" exitCode=0 Feb 02 17:03:40 crc kubenswrapper[4835]: I0202 17:03:40.976725 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn7rm" event={"ID":"3d99def3-2324-49f9-88d8-23f6ee20db3a","Type":"ContainerDied","Data":"c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd"} Feb 02 17:03:40 crc kubenswrapper[4835]: I0202 17:03:40.976752 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn7rm" event={"ID":"3d99def3-2324-49f9-88d8-23f6ee20db3a","Type":"ContainerStarted","Data":"424b101c2303cbbab09059a75072b5c2666b6f10a6ad3d1ee6e85c57e580477b"} Feb 02 17:03:41 crc kubenswrapper[4835]: I0202 17:03:41.983045 4835 generic.go:334] "Generic (PLEG): container finished" podID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerID="b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e" exitCode=0 Feb 02 17:03:41 crc kubenswrapper[4835]: I0202 17:03:41.983140 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn7rm" event={"ID":"3d99def3-2324-49f9-88d8-23f6ee20db3a","Type":"ContainerDied","Data":"b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e"} Feb 02 17:03:42 crc kubenswrapper[4835]: I0202 17:03:42.991209 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn7rm" event={"ID":"3d99def3-2324-49f9-88d8-23f6ee20db3a","Type":"ContainerStarted","Data":"922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937"} Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.738294 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gn7rm" podStartSLOduration=3.298000562 podStartE2EDuration="4.738260228s" podCreationTimestamp="2026-02-02 17:03:39 +0000 UTC" firstStartedPulling="2026-02-02 17:03:40.978283613 +0000 UTC m=+812.599887693" lastFinishedPulling="2026-02-02 17:03:42.418543279 +0000 UTC m=+814.040147359" observedRunningTime="2026-02-02 17:03:43.030751162 +0000 UTC m=+814.652355252" watchObservedRunningTime="2026-02-02 17:03:43.738260228 +0000 UTC m=+815.359864308" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.742122 4835 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-7pzhc"] Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.743261 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.755156 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7pzhc"] Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.890465 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-b7878cd68-s889h"] Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.891372 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.892457 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-utilities\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.892531 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff2nj\" (UniqueName: \"kubernetes.io/projected/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-kube-api-access-ff2nj\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.892561 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-catalog-content\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.893592 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.894045 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-9fpdv" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.894225 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.895234 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.896353 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.904463 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-b7878cd68-s889h"] Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.993394 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/846128ff-a92d-40b9-835b-3184cb35de48-webhook-cert\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " 
pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.993461 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-utilities\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.993485 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/846128ff-a92d-40b9-835b-3184cb35de48-apiservice-cert\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.993969 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-utilities\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.994064 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff2nj\" (UniqueName: \"kubernetes.io/projected/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-kube-api-access-ff2nj\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.994100 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-catalog-content\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.994154 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khs8p\" (UniqueName: \"kubernetes.io/projected/846128ff-a92d-40b9-835b-3184cb35de48-kube-api-access-khs8p\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:43 crc kubenswrapper[4835]: I0202 17:03:43.994868 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-catalog-content\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.017219 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff2nj\" (UniqueName: \"kubernetes.io/projected/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-kube-api-access-ff2nj\") pod \"community-operators-7pzhc\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.058966 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.097097 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/846128ff-a92d-40b9-835b-3184cb35de48-apiservice-cert\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.097194 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khs8p\" (UniqueName: \"kubernetes.io/projected/846128ff-a92d-40b9-835b-3184cb35de48-kube-api-access-khs8p\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.097249 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/846128ff-a92d-40b9-835b-3184cb35de48-webhook-cert\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.103148 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/846128ff-a92d-40b9-835b-3184cb35de48-webhook-cert\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.104999 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/846128ff-a92d-40b9-835b-3184cb35de48-apiservice-cert\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.124793 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khs8p\" (UniqueName: \"kubernetes.io/projected/846128ff-a92d-40b9-835b-3184cb35de48-kube-api-access-khs8p\") pod \"metallb-operator-controller-manager-b7878cd68-s889h\" (UID: \"846128ff-a92d-40b9-835b-3184cb35de48\") " pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.144984 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9"] Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.145703 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.150942 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.151174 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-xpvdg" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.151354 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.206011 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9"] Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.206118 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.314632 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6d2b\" (UniqueName: \"kubernetes.io/projected/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-kube-api-access-l6d2b\") pod \"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.314980 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-webhook-cert\") pod \"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.315046 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-apiservice-cert\") pod \"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.416329 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-apiservice-cert\") pod \"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.416449 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6d2b\" (UniqueName: \"kubernetes.io/projected/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-kube-api-access-l6d2b\") pod \"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.416484 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-webhook-cert\") pod 
\"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.422532 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-apiservice-cert\") pod \"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.426723 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-webhook-cert\") pod \"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.441725 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6d2b\" (UniqueName: \"kubernetes.io/projected/5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73-kube-api-access-l6d2b\") pod \"metallb-operator-webhook-server-5f9664b4df-s2qs9\" (UID: \"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73\") " pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.462921 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.588169 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-b7878cd68-s889h"] Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.631397 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7pzhc"] Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.870630 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.870986 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:03:44 crc kubenswrapper[4835]: I0202 17:03:44.995623 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9"] Feb 02 17:03:45 crc kubenswrapper[4835]: I0202 17:03:45.002610 4835 generic.go:334] "Generic (PLEG): container finished" podID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerID="12f3b459840db2ab1aab02c98158c2e6ef360028bbd7ada95ce541d51d5e79cd" exitCode=0 Feb 02 17:03:45 crc kubenswrapper[4835]: I0202 17:03:45.002712 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7pzhc" event={"ID":"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea","Type":"ContainerDied","Data":"12f3b459840db2ab1aab02c98158c2e6ef360028bbd7ada95ce541d51d5e79cd"} Feb 02 
17:03:45 crc kubenswrapper[4835]: I0202 17:03:45.002749 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7pzhc" event={"ID":"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea","Type":"ContainerStarted","Data":"9cb048aa3f5b7dc0d6f34a5a1edc603573d29e8a8683b38ea76bcbcb4005f82c"} Feb 02 17:03:45 crc kubenswrapper[4835]: I0202 17:03:45.004256 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" event={"ID":"846128ff-a92d-40b9-835b-3184cb35de48","Type":"ContainerStarted","Data":"4d73406b85047d70e4b23b86bf2d5d2d7932a1389dcfcc903c8f4e4d3117e29e"} Feb 02 17:03:45 crc kubenswrapper[4835]: W0202 17:03:45.006650 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c1b1958_8e0e_40bd_9325_b7e6e4aa4a73.slice/crio-9fd6ec94f4ae3907e327377153ddda2e35a9af3df1071bf6beeb352037fd85cb WatchSource:0}: Error finding container 9fd6ec94f4ae3907e327377153ddda2e35a9af3df1071bf6beeb352037fd85cb: Status 404 returned error can't find the container with id 9fd6ec94f4ae3907e327377153ddda2e35a9af3df1071bf6beeb352037fd85cb Feb 02 17:03:46 crc kubenswrapper[4835]: I0202 17:03:46.021436 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" event={"ID":"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73","Type":"ContainerStarted","Data":"9fd6ec94f4ae3907e327377153ddda2e35a9af3df1071bf6beeb352037fd85cb"} Feb 02 17:03:46 crc kubenswrapper[4835]: I0202 17:03:46.027158 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7pzhc" event={"ID":"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea","Type":"ContainerStarted","Data":"68a2d6c3b5ab76d6330dce05ebfb92c25742bca2949f1cfaa0b3ce3974e9c350"} Feb 02 17:03:47 crc kubenswrapper[4835]: I0202 17:03:47.049590 4835 generic.go:334] "Generic (PLEG): container finished" podID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerID="68a2d6c3b5ab76d6330dce05ebfb92c25742bca2949f1cfaa0b3ce3974e9c350" exitCode=0 Feb 02 17:03:47 crc kubenswrapper[4835]: I0202 17:03:47.049640 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7pzhc" event={"ID":"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea","Type":"ContainerDied","Data":"68a2d6c3b5ab76d6330dce05ebfb92c25742bca2949f1cfaa0b3ce3974e9c350"} Feb 02 17:03:48 crc kubenswrapper[4835]: I0202 17:03:48.069531 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" event={"ID":"846128ff-a92d-40b9-835b-3184cb35de48","Type":"ContainerStarted","Data":"faf9282dd67e4a4fff1fb38c9798beb598197340f8ec2793b4b81ff9c3c99ab8"} Feb 02 17:03:48 crc kubenswrapper[4835]: I0202 17:03:48.070507 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:03:48 crc kubenswrapper[4835]: I0202 17:03:48.091030 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" podStartSLOduration=1.924908958 podStartE2EDuration="5.09100745s" podCreationTimestamp="2026-02-02 17:03:43 +0000 UTC" firstStartedPulling="2026-02-02 17:03:44.602120198 +0000 UTC m=+816.223724278" lastFinishedPulling="2026-02-02 17:03:47.76821869 +0000 UTC m=+819.389822770" observedRunningTime="2026-02-02 17:03:48.087332302 +0000 UTC m=+819.708936372" 
watchObservedRunningTime="2026-02-02 17:03:48.09100745 +0000 UTC m=+819.712611530" Feb 02 17:03:49 crc kubenswrapper[4835]: I0202 17:03:49.090805 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7pzhc" event={"ID":"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea","Type":"ContainerStarted","Data":"9c6f6834985e235c1b2df1364389193def1240f7468aefcba4943d31c589be9f"} Feb 02 17:03:49 crc kubenswrapper[4835]: I0202 17:03:49.114786 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7pzhc" podStartSLOduration=2.986557477 podStartE2EDuration="6.114767191s" podCreationTimestamp="2026-02-02 17:03:43 +0000 UTC" firstStartedPulling="2026-02-02 17:03:45.003828799 +0000 UTC m=+816.625432879" lastFinishedPulling="2026-02-02 17:03:48.132038513 +0000 UTC m=+819.753642593" observedRunningTime="2026-02-02 17:03:49.11305911 +0000 UTC m=+820.734663190" watchObservedRunningTime="2026-02-02 17:03:49.114767191 +0000 UTC m=+820.736371271" Feb 02 17:03:50 crc kubenswrapper[4835]: I0202 17:03:50.064189 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:50 crc kubenswrapper[4835]: I0202 17:03:50.065028 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:50 crc kubenswrapper[4835]: I0202 17:03:50.100347 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" event={"ID":"5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73","Type":"ContainerStarted","Data":"368ef8a085925daad433e6071242b253d683a1c175adf87d5026bdb63700ed40"} Feb 02 17:03:50 crc kubenswrapper[4835]: I0202 17:03:50.101159 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:03:50 crc kubenswrapper[4835]: I0202 17:03:50.112958 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:50 crc kubenswrapper[4835]: I0202 17:03:50.124181 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" podStartSLOduration=1.266357677 podStartE2EDuration="6.124158557s" podCreationTimestamp="2026-02-02 17:03:44 +0000 UTC" firstStartedPulling="2026-02-02 17:03:45.009055504 +0000 UTC m=+816.630659574" lastFinishedPulling="2026-02-02 17:03:49.866856374 +0000 UTC m=+821.488460454" observedRunningTime="2026-02-02 17:03:50.119838344 +0000 UTC m=+821.741442434" watchObservedRunningTime="2026-02-02 17:03:50.124158557 +0000 UTC m=+821.745762637" Feb 02 17:03:50 crc kubenswrapper[4835]: I0202 17:03:50.165866 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:51 crc kubenswrapper[4835]: I0202 17:03:51.733475 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn7rm"] Feb 02 17:03:52 crc kubenswrapper[4835]: I0202 17:03:52.113501 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gn7rm" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerName="registry-server" containerID="cri-o://922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937" gracePeriod=2 Feb 02 17:03:53 crc 
kubenswrapper[4835]: I0202 17:03:53.002160 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.074401 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-catalog-content\") pod \"3d99def3-2324-49f9-88d8-23f6ee20db3a\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.074471 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lk7l\" (UniqueName: \"kubernetes.io/projected/3d99def3-2324-49f9-88d8-23f6ee20db3a-kube-api-access-8lk7l\") pod \"3d99def3-2324-49f9-88d8-23f6ee20db3a\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.074509 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-utilities\") pod \"3d99def3-2324-49f9-88d8-23f6ee20db3a\" (UID: \"3d99def3-2324-49f9-88d8-23f6ee20db3a\") " Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.075532 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-utilities" (OuterVolumeSpecName: "utilities") pod "3d99def3-2324-49f9-88d8-23f6ee20db3a" (UID: "3d99def3-2324-49f9-88d8-23f6ee20db3a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.089537 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d99def3-2324-49f9-88d8-23f6ee20db3a-kube-api-access-8lk7l" (OuterVolumeSpecName: "kube-api-access-8lk7l") pod "3d99def3-2324-49f9-88d8-23f6ee20db3a" (UID: "3d99def3-2324-49f9-88d8-23f6ee20db3a"). InnerVolumeSpecName "kube-api-access-8lk7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.100627 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d99def3-2324-49f9-88d8-23f6ee20db3a" (UID: "3d99def3-2324-49f9-88d8-23f6ee20db3a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.121519 4835 generic.go:334] "Generic (PLEG): container finished" podID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerID="922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937" exitCode=0 Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.121580 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn7rm" event={"ID":"3d99def3-2324-49f9-88d8-23f6ee20db3a","Type":"ContainerDied","Data":"922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937"} Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.121587 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gn7rm" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.121607 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gn7rm" event={"ID":"3d99def3-2324-49f9-88d8-23f6ee20db3a","Type":"ContainerDied","Data":"424b101c2303cbbab09059a75072b5c2666b6f10a6ad3d1ee6e85c57e580477b"} Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.121639 4835 scope.go:117] "RemoveContainer" containerID="922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.146077 4835 scope.go:117] "RemoveContainer" containerID="b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.161461 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn7rm"] Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.173081 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gn7rm"] Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.175283 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.175306 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lk7l\" (UniqueName: \"kubernetes.io/projected/3d99def3-2324-49f9-88d8-23f6ee20db3a-kube-api-access-8lk7l\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.175315 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d99def3-2324-49f9-88d8-23f6ee20db3a-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.175806 4835 scope.go:117] "RemoveContainer" containerID="c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.197133 4835 scope.go:117] "RemoveContainer" containerID="922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937" Feb 02 17:03:53 crc kubenswrapper[4835]: E0202 17:03:53.199430 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937\": container with ID starting with 922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937 not found: ID does not exist" containerID="922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.199590 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937"} err="failed to get container status \"922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937\": rpc error: code = NotFound desc = could not find container \"922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937\": container with ID starting with 922d660dd4fdd553070b07c299af5852060bea3d18d0d1ab208b64a98a4e5937 not found: ID does not exist" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.199619 4835 scope.go:117] "RemoveContainer" containerID="b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e" Feb 02 17:03:53 crc kubenswrapper[4835]: E0202 
17:03:53.200468 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e\": container with ID starting with b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e not found: ID does not exist" containerID="b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.200512 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e"} err="failed to get container status \"b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e\": rpc error: code = NotFound desc = could not find container \"b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e\": container with ID starting with b39cb191db77e558e2cc81e7b0e353e8323ac8381f97d392fd1b1adc6c8e296e not found: ID does not exist" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.200531 4835 scope.go:117] "RemoveContainer" containerID="c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.200815 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" path="/var/lib/kubelet/pods/3d99def3-2324-49f9-88d8-23f6ee20db3a/volumes" Feb 02 17:03:53 crc kubenswrapper[4835]: E0202 17:03:53.201321 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd\": container with ID starting with c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd not found: ID does not exist" containerID="c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd" Feb 02 17:03:53 crc kubenswrapper[4835]: I0202 17:03:53.201354 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd"} err="failed to get container status \"c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd\": rpc error: code = NotFound desc = could not find container \"c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd\": container with ID starting with c2fed362f749cd64ccad06225dbd9d568531122da962db1441e2490614e4fccd not found: ID does not exist" Feb 02 17:03:54 crc kubenswrapper[4835]: I0202 17:03:54.059373 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:54 crc kubenswrapper[4835]: I0202 17:03:54.059766 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:54 crc kubenswrapper[4835]: I0202 17:03:54.096751 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:54 crc kubenswrapper[4835]: I0202 17:03:54.165417 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:56 crc kubenswrapper[4835]: I0202 17:03:56.741381 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7pzhc"] Feb 02 17:03:56 crc kubenswrapper[4835]: I0202 17:03:56.741632 4835 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-marketplace/community-operators-7pzhc" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerName="registry-server" containerID="cri-o://9c6f6834985e235c1b2df1364389193def1240f7468aefcba4943d31c589be9f" gracePeriod=2 Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.142208 4835 generic.go:334] "Generic (PLEG): container finished" podID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerID="9c6f6834985e235c1b2df1364389193def1240f7468aefcba4943d31c589be9f" exitCode=0 Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.142409 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7pzhc" event={"ID":"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea","Type":"ContainerDied","Data":"9c6f6834985e235c1b2df1364389193def1240f7468aefcba4943d31c589be9f"} Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.785297 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.829408 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff2nj\" (UniqueName: \"kubernetes.io/projected/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-kube-api-access-ff2nj\") pod \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.829508 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-catalog-content\") pod \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.829688 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-utilities\") pod \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\" (UID: \"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea\") " Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.830574 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-utilities" (OuterVolumeSpecName: "utilities") pod "92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" (UID: "92ea31d1-68a3-4c68-9c5b-f43f1c8162ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.836525 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-kube-api-access-ff2nj" (OuterVolumeSpecName: "kube-api-access-ff2nj") pod "92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" (UID: "92ea31d1-68a3-4c68-9c5b-f43f1c8162ea"). InnerVolumeSpecName "kube-api-access-ff2nj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.877579 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" (UID: "92ea31d1-68a3-4c68-9c5b-f43f1c8162ea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.930865 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.930919 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff2nj\" (UniqueName: \"kubernetes.io/projected/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-kube-api-access-ff2nj\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:57 crc kubenswrapper[4835]: I0202 17:03:57.930934 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:03:58 crc kubenswrapper[4835]: I0202 17:03:58.149337 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7pzhc" event={"ID":"92ea31d1-68a3-4c68-9c5b-f43f1c8162ea","Type":"ContainerDied","Data":"9cb048aa3f5b7dc0d6f34a5a1edc603573d29e8a8683b38ea76bcbcb4005f82c"} Feb 02 17:03:58 crc kubenswrapper[4835]: I0202 17:03:58.149385 4835 scope.go:117] "RemoveContainer" containerID="9c6f6834985e235c1b2df1364389193def1240f7468aefcba4943d31c589be9f" Feb 02 17:03:58 crc kubenswrapper[4835]: I0202 17:03:58.149548 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7pzhc" Feb 02 17:03:58 crc kubenswrapper[4835]: I0202 17:03:58.170371 4835 scope.go:117] "RemoveContainer" containerID="68a2d6c3b5ab76d6330dce05ebfb92c25742bca2949f1cfaa0b3ce3974e9c350" Feb 02 17:03:58 crc kubenswrapper[4835]: I0202 17:03:58.196066 4835 scope.go:117] "RemoveContainer" containerID="12f3b459840db2ab1aab02c98158c2e6ef360028bbd7ada95ce541d51d5e79cd" Feb 02 17:03:58 crc kubenswrapper[4835]: I0202 17:03:58.228527 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7pzhc"] Feb 02 17:03:58 crc kubenswrapper[4835]: I0202 17:03:58.236247 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7pzhc"] Feb 02 17:03:59 crc kubenswrapper[4835]: I0202 17:03:59.198698 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" path="/var/lib/kubelet/pods/92ea31d1-68a3-4c68-9c5b-f43f1c8162ea/volumes" Feb 02 17:04:04 crc kubenswrapper[4835]: I0202 17:04:04.468087 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5f9664b4df-s2qs9" Feb 02 17:04:14 crc kubenswrapper[4835]: I0202 17:04:14.870376 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:04:14 crc kubenswrapper[4835]: I0202 17:04:14.871432 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:04:14 crc kubenswrapper[4835]: I0202 17:04:14.871521 4835 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:04:14 crc kubenswrapper[4835]: I0202 17:04:14.872367 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e554e578a75a247804791314d623e05f6091a40930f6f9c01d754a6a53db79cc"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:04:14 crc kubenswrapper[4835]: I0202 17:04:14.872461 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://e554e578a75a247804791314d623e05f6091a40930f6f9c01d754a6a53db79cc" gracePeriod=600 Feb 02 17:04:15 crc kubenswrapper[4835]: I0202 17:04:15.289870 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="e554e578a75a247804791314d623e05f6091a40930f6f9c01d754a6a53db79cc" exitCode=0 Feb 02 17:04:15 crc kubenswrapper[4835]: I0202 17:04:15.290212 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"e554e578a75a247804791314d623e05f6091a40930f6f9c01d754a6a53db79cc"} Feb 02 17:04:15 crc kubenswrapper[4835]: I0202 17:04:15.290248 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"ed5bb6b3343a006060ae2f0f9c428cf6f417413f7227d48031553b98961dab3a"} Feb 02 17:04:15 crc kubenswrapper[4835]: I0202 17:04:15.290312 4835 scope.go:117] "RemoveContainer" containerID="e0c6bc82781affe05cbe6c90c01b78ec721f1da56df0b87b02d404a5427cd6a8" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.209000 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-b7878cd68-s889h" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.910963 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9"] Feb 02 17:04:24 crc kubenswrapper[4835]: E0202 17:04:24.911257 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerName="extract-utilities" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.911293 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerName="extract-utilities" Feb 02 17:04:24 crc kubenswrapper[4835]: E0202 17:04:24.911311 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerName="extract-content" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.911319 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerName="extract-content" Feb 02 17:04:24 crc kubenswrapper[4835]: E0202 17:04:24.911333 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerName="extract-content" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.911344 4835 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerName="extract-content" Feb 02 17:04:24 crc kubenswrapper[4835]: E0202 17:04:24.911356 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerName="registry-server" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.911365 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerName="registry-server" Feb 02 17:04:24 crc kubenswrapper[4835]: E0202 17:04:24.911379 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerName="registry-server" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.911387 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerName="registry-server" Feb 02 17:04:24 crc kubenswrapper[4835]: E0202 17:04:24.911398 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerName="extract-utilities" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.911406 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerName="extract-utilities" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.911526 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d99def3-2324-49f9-88d8-23f6ee20db3a" containerName="registry-server" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.911540 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="92ea31d1-68a3-4c68-9c5b-f43f1c8162ea" containerName="registry-server" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.923983 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-xfhbb"] Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.934212 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.936583 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-95w6r" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.940316 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.944553 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.950449 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.950801 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.969150 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9"] Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986110 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9308a217-4e09-4f60-a7d8-698cde044a53-metrics-certs\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986423 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-reloader\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986509 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9308a217-4e09-4f60-a7d8-698cde044a53-frr-startup\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986598 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45gtg\" (UniqueName: \"kubernetes.io/projected/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-kube-api-access-45gtg\") pod \"frr-k8s-webhook-server-7df86c4f6c-66lj9\" (UID: \"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986696 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-frr-conf\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986764 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-metrics\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986834 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r8kj\" (UniqueName: \"kubernetes.io/projected/9308a217-4e09-4f60-a7d8-698cde044a53-kube-api-access-6r8kj\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986914 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-cert\") pod 
\"frr-k8s-webhook-server-7df86c4f6c-66lj9\" (UID: \"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:24 crc kubenswrapper[4835]: I0202 17:04:24.986994 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-frr-sockets\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.013569 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-glqbw"] Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.014826 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.017069 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-8l9fx" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.017330 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.017426 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.017530 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.018155 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-dsndw"] Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.019013 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.019891 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.034669 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-dsndw"] Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.087838 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-frr-conf\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.088132 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-metrics\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.088154 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r8kj\" (UniqueName: \"kubernetes.io/projected/9308a217-4e09-4f60-a7d8-698cde044a53-kube-api-access-6r8kj\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.088182 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-66lj9\" (UID: \"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.088209 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-frr-sockets\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.088236 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9308a217-4e09-4f60-a7d8-698cde044a53-metrics-certs\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.088252 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-reloader\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.088267 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9308a217-4e09-4f60-a7d8-698cde044a53-frr-startup\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.088321 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45gtg\" (UniqueName: 
\"kubernetes.io/projected/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-kube-api-access-45gtg\") pod \"frr-k8s-webhook-server-7df86c4f6c-66lj9\" (UID: \"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.089093 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-frr-conf\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.089299 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-frr-sockets\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.089347 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-reloader\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.090438 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9308a217-4e09-4f60-a7d8-698cde044a53-frr-startup\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.090533 4835 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.090581 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9308a217-4e09-4f60-a7d8-698cde044a53-metrics-certs podName:9308a217-4e09-4f60-a7d8-698cde044a53 nodeName:}" failed. No retries permitted until 2026-02-02 17:04:25.590566431 +0000 UTC m=+857.212170511 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9308a217-4e09-4f60-a7d8-698cde044a53-metrics-certs") pod "frr-k8s-xfhbb" (UID: "9308a217-4e09-4f60-a7d8-698cde044a53") : secret "frr-k8s-certs-secret" not found Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.091012 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9308a217-4e09-4f60-a7d8-698cde044a53-metrics\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.091069 4835 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.091224 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-cert podName:8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e nodeName:}" failed. No retries permitted until 2026-02-02 17:04:25.591214987 +0000 UTC m=+857.212819067 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-cert") pod "frr-k8s-webhook-server-7df86c4f6c-66lj9" (UID: "8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e") : secret "frr-k8s-webhook-server-cert" not found Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.111915 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45gtg\" (UniqueName: \"kubernetes.io/projected/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-kube-api-access-45gtg\") pod \"frr-k8s-webhook-server-7df86c4f6c-66lj9\" (UID: \"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.127863 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r8kj\" (UniqueName: \"kubernetes.io/projected/9308a217-4e09-4f60-a7d8-698cde044a53-kube-api-access-6r8kj\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.190399 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-metallb-excludel2\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.190465 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9sj5\" (UniqueName: \"kubernetes.io/projected/0390332b-c0b4-4a28-b815-69ad9d9bed13-kube-api-access-v9sj5\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.190493 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-metrics-certs\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.190922 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.191003 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl5m6\" (UniqueName: \"kubernetes.io/projected/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-kube-api-access-hl5m6\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.191047 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-cert\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.191101 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-metrics-certs\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.292063 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-metrics-certs\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.292176 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.292224 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl5m6\" (UniqueName: \"kubernetes.io/projected/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-kube-api-access-hl5m6\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.292244 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-cert\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.292316 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-metrics-certs\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.292360 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-metallb-excludel2\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.292381 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9sj5\" (UniqueName: \"kubernetes.io/projected/0390332b-c0b4-4a28-b815-69ad9d9bed13-kube-api-access-v9sj5\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.292373 4835 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.292520 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-metrics-certs podName:0390332b-c0b4-4a28-b815-69ad9d9bed13 nodeName:}" failed. No retries permitted until 2026-02-02 17:04:25.792484987 +0000 UTC m=+857.414089227 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-metrics-certs") pod "controller-6968d8fdc4-dsndw" (UID: "0390332b-c0b4-4a28-b815-69ad9d9bed13") : secret "controller-certs-secret" not found Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.293694 4835 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.294230 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist podName:9b37cd70-fe2e-406b-a1f2-5aade78f75e4 nodeName:}" failed. No retries permitted until 2026-02-02 17:04:25.794199148 +0000 UTC m=+857.415803228 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist") pod "speaker-glqbw" (UID: "9b37cd70-fe2e-406b-a1f2-5aade78f75e4") : secret "metallb-memberlist" not found Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.294303 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-metallb-excludel2\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.295376 4835 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.297799 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-metrics-certs\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.308733 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-cert\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.310552 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl5m6\" (UniqueName: \"kubernetes.io/projected/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-kube-api-access-hl5m6\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.310555 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9sj5\" (UniqueName: \"kubernetes.io/projected/0390332b-c0b4-4a28-b815-69ad9d9bed13-kube-api-access-v9sj5\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.595984 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9308a217-4e09-4f60-a7d8-698cde044a53-metrics-certs\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.596384 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-66lj9\" (UID: \"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.599230 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9308a217-4e09-4f60-a7d8-698cde044a53-metrics-certs\") pod \"frr-k8s-xfhbb\" (UID: \"9308a217-4e09-4f60-a7d8-698cde044a53\") " pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.600710 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-66lj9\" (UID: \"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.798650 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-metrics-certs\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.798736 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.798893 4835 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 02 17:04:25 crc kubenswrapper[4835]: E0202 17:04:25.798949 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist podName:9b37cd70-fe2e-406b-a1f2-5aade78f75e4 nodeName:}" failed. No retries permitted until 2026-02-02 17:04:26.798932517 +0000 UTC m=+858.420536597 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist") pod "speaker-glqbw" (UID: "9b37cd70-fe2e-406b-a1f2-5aade78f75e4") : secret "metallb-memberlist" not found Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.804975 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0390332b-c0b4-4a28-b815-69ad9d9bed13-metrics-certs\") pod \"controller-6968d8fdc4-dsndw\" (UID: \"0390332b-c0b4-4a28-b815-69ad9d9bed13\") " pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.860073 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.879340 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:25 crc kubenswrapper[4835]: I0202 17:04:25.940651 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.164378 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-dsndw"] Feb 02 17:04:26 crc kubenswrapper[4835]: W0202 17:04:26.172727 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0390332b_c0b4_4a28_b815_69ad9d9bed13.slice/crio-7f16a2eeee9a899ddfd12697022fab81e22e203fed8f66a7345b26e8e7e743ef WatchSource:0}: Error finding container 7f16a2eeee9a899ddfd12697022fab81e22e203fed8f66a7345b26e8e7e743ef: Status 404 returned error can't find the container with id 7f16a2eeee9a899ddfd12697022fab81e22e203fed8f66a7345b26e8e7e743ef Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.302437 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9"] Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.356715 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" event={"ID":"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e","Type":"ContainerStarted","Data":"5add640f28cc48f123257c8ae35f027f2b72bb0692410a588c2a17a5c12f84c3"} Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.358049 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerStarted","Data":"d0be2411cef457442007280e2769272e89cbba07a17d6b1f5eb7c7c0dfc34477"} Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.359723 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-dsndw" event={"ID":"0390332b-c0b4-4a28-b815-69ad9d9bed13","Type":"ContainerStarted","Data":"8cc90c1412fbe780751281a8cef3b79e5ade87ddcbcb9f65bf3b04897abb5670"} Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.359749 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-dsndw" event={"ID":"0390332b-c0b4-4a28-b815-69ad9d9bed13","Type":"ContainerStarted","Data":"7f16a2eeee9a899ddfd12697022fab81e22e203fed8f66a7345b26e8e7e743ef"} Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.813632 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.820578 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9b37cd70-fe2e-406b-a1f2-5aade78f75e4-memberlist\") pod \"speaker-glqbw\" (UID: \"9b37cd70-fe2e-406b-a1f2-5aade78f75e4\") " pod="metallb-system/speaker-glqbw" Feb 02 17:04:26 crc kubenswrapper[4835]: I0202 17:04:26.832953 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-glqbw" Feb 02 17:04:26 crc kubenswrapper[4835]: W0202 17:04:26.859418 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b37cd70_fe2e_406b_a1f2_5aade78f75e4.slice/crio-3b560d346e1432a1520415b9bf8940dcd6e5967ba1329bb7066c6682e3cb7ac6 WatchSource:0}: Error finding container 3b560d346e1432a1520415b9bf8940dcd6e5967ba1329bb7066c6682e3cb7ac6: Status 404 returned error can't find the container with id 3b560d346e1432a1520415b9bf8940dcd6e5967ba1329bb7066c6682e3cb7ac6 Feb 02 17:04:27 crc kubenswrapper[4835]: I0202 17:04:27.366317 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-glqbw" event={"ID":"9b37cd70-fe2e-406b-a1f2-5aade78f75e4","Type":"ContainerStarted","Data":"e485bc49c6db21a6b0b85d27928accc2cd812387c0aa7e38a8dac40d9a0e4331"} Feb 02 17:04:27 crc kubenswrapper[4835]: I0202 17:04:27.366702 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-glqbw" event={"ID":"9b37cd70-fe2e-406b-a1f2-5aade78f75e4","Type":"ContainerStarted","Data":"21565a09777605f76191d95787564ba3c4ae66fbaaf140f3f56e384ed963a131"} Feb 02 17:04:27 crc kubenswrapper[4835]: I0202 17:04:27.366731 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-glqbw" event={"ID":"9b37cd70-fe2e-406b-a1f2-5aade78f75e4","Type":"ContainerStarted","Data":"3b560d346e1432a1520415b9bf8940dcd6e5967ba1329bb7066c6682e3cb7ac6"} Feb 02 17:04:27 crc kubenswrapper[4835]: I0202 17:04:27.366922 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-glqbw" Feb 02 17:04:27 crc kubenswrapper[4835]: I0202 17:04:27.368297 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-dsndw" event={"ID":"0390332b-c0b4-4a28-b815-69ad9d9bed13","Type":"ContainerStarted","Data":"fc14019792224763c2121f4fe021b6245b26b4671e7130833677825f4a09b365"} Feb 02 17:04:27 crc kubenswrapper[4835]: I0202 17:04:27.368520 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:27 crc kubenswrapper[4835]: I0202 17:04:27.391508 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-glqbw" podStartSLOduration=3.39148684 podStartE2EDuration="3.39148684s" podCreationTimestamp="2026-02-02 17:04:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:04:27.387682419 +0000 UTC m=+859.009286509" watchObservedRunningTime="2026-02-02 17:04:27.39148684 +0000 UTC m=+859.013090920" Feb 02 17:04:27 crc kubenswrapper[4835]: I0202 17:04:27.413581 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-dsndw" podStartSLOduration=3.413557159 podStartE2EDuration="3.413557159s" podCreationTimestamp="2026-02-02 17:04:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:04:27.408568409 +0000 UTC m=+859.030172509" watchObservedRunningTime="2026-02-02 17:04:27.413557159 +0000 UTC m=+859.035161239" Feb 02 17:04:33 crc kubenswrapper[4835]: I0202 17:04:33.430717 4835 generic.go:334] "Generic (PLEG): container finished" podID="9308a217-4e09-4f60-a7d8-698cde044a53" containerID="fdcd100100c6a0151c1dc8ca9de1071ce0cfb53c5a369e3acce7240d6c383f12" 
exitCode=0 Feb 02 17:04:33 crc kubenswrapper[4835]: I0202 17:04:33.431250 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerDied","Data":"fdcd100100c6a0151c1dc8ca9de1071ce0cfb53c5a369e3acce7240d6c383f12"} Feb 02 17:04:33 crc kubenswrapper[4835]: I0202 17:04:33.433847 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" event={"ID":"8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e","Type":"ContainerStarted","Data":"66eb7b1d73fc26d163a20aa1c62907b8f7fc89e9138264aa7bac5ac8d224d1f6"} Feb 02 17:04:33 crc kubenswrapper[4835]: I0202 17:04:33.434022 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:33 crc kubenswrapper[4835]: I0202 17:04:33.479962 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" podStartSLOduration=2.804467861 podStartE2EDuration="9.479942736s" podCreationTimestamp="2026-02-02 17:04:24 +0000 UTC" firstStartedPulling="2026-02-02 17:04:26.309868045 +0000 UTC m=+857.931472125" lastFinishedPulling="2026-02-02 17:04:32.98534292 +0000 UTC m=+864.606947000" observedRunningTime="2026-02-02 17:04:33.479627489 +0000 UTC m=+865.101231569" watchObservedRunningTime="2026-02-02 17:04:33.479942736 +0000 UTC m=+865.101546816" Feb 02 17:04:34 crc kubenswrapper[4835]: I0202 17:04:34.441985 4835 generic.go:334] "Generic (PLEG): container finished" podID="9308a217-4e09-4f60-a7d8-698cde044a53" containerID="aa0794edca03a5c7f9ccc5a88236e2c500fb3dfb046f52c62b7c19ff91ecdb5c" exitCode=0 Feb 02 17:04:34 crc kubenswrapper[4835]: I0202 17:04:34.442032 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerDied","Data":"aa0794edca03a5c7f9ccc5a88236e2c500fb3dfb046f52c62b7c19ff91ecdb5c"} Feb 02 17:04:35 crc kubenswrapper[4835]: I0202 17:04:35.451507 4835 generic.go:334] "Generic (PLEG): container finished" podID="9308a217-4e09-4f60-a7d8-698cde044a53" containerID="2c72c20182ffe48c3714af446bbcd2d810ec22539f9aeb3f415c780a9c009389" exitCode=0 Feb 02 17:04:35 crc kubenswrapper[4835]: I0202 17:04:35.451574 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerDied","Data":"2c72c20182ffe48c3714af446bbcd2d810ec22539f9aeb3f415c780a9c009389"} Feb 02 17:04:36 crc kubenswrapper[4835]: I0202 17:04:36.466955 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerStarted","Data":"a2c9e0bae1ad8e5a801f2ffb8635215f14e0cbb6a76a728f114f3fbd2a50e3c9"} Feb 02 17:04:36 crc kubenswrapper[4835]: I0202 17:04:36.467265 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerStarted","Data":"fe8d2044cc247892ff30abb0b0eedbc855357a01e2f74b03d91bc6de4e5f0a08"} Feb 02 17:04:36 crc kubenswrapper[4835]: I0202 17:04:36.467308 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:36 crc kubenswrapper[4835]: I0202 17:04:36.467322 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" 
event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerStarted","Data":"4a826e696d654279b0d4c3c8395aa3af705200da0b67ca9eb2f7faaf00f514fa"} Feb 02 17:04:36 crc kubenswrapper[4835]: I0202 17:04:36.467334 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerStarted","Data":"abf7bb1a4e677b3c2f4c8852c259b2a1f99d2b647a9eaa079192ee2e30fc5325"} Feb 02 17:04:36 crc kubenswrapper[4835]: I0202 17:04:36.467345 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerStarted","Data":"2f6f3f46588a852cabf042a593bc2c729efed5f64d799f2790f943fe769fa261"} Feb 02 17:04:36 crc kubenswrapper[4835]: I0202 17:04:36.510268 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-xfhbb" podStartSLOduration=5.547005758 podStartE2EDuration="12.510243285s" podCreationTimestamp="2026-02-02 17:04:24 +0000 UTC" firstStartedPulling="2026-02-02 17:04:26.03743672 +0000 UTC m=+857.659040810" lastFinishedPulling="2026-02-02 17:04:33.000674247 +0000 UTC m=+864.622278337" observedRunningTime="2026-02-02 17:04:36.503115424 +0000 UTC m=+868.124719504" watchObservedRunningTime="2026-02-02 17:04:36.510243285 +0000 UTC m=+868.131847375" Feb 02 17:04:37 crc kubenswrapper[4835]: I0202 17:04:37.477673 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xfhbb" event={"ID":"9308a217-4e09-4f60-a7d8-698cde044a53","Type":"ContainerStarted","Data":"12e273c6092820b87f60cf2ed046580206e25d158adf7f95c9c9473bb06723e3"} Feb 02 17:04:40 crc kubenswrapper[4835]: I0202 17:04:40.879941 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:40 crc kubenswrapper[4835]: I0202 17:04:40.931774 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:45 crc kubenswrapper[4835]: I0202 17:04:45.866797 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-66lj9" Feb 02 17:04:45 crc kubenswrapper[4835]: I0202 17:04:45.884929 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-xfhbb" Feb 02 17:04:45 crc kubenswrapper[4835]: I0202 17:04:45.944954 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-dsndw" Feb 02 17:04:46 crc kubenswrapper[4835]: I0202 17:04:46.836942 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-glqbw" Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.641342 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-mpcvq"] Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.642311 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mpcvq" Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.645554 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.645787 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-fk5ld" Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.645811 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.656207 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mpcvq"] Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.724116 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfhcm\" (UniqueName: \"kubernetes.io/projected/4b24e244-cb70-4924-a2bb-576aeb95cd73-kube-api-access-pfhcm\") pod \"openstack-operator-index-mpcvq\" (UID: \"4b24e244-cb70-4924-a2bb-576aeb95cd73\") " pod="openstack-operators/openstack-operator-index-mpcvq" Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.825799 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfhcm\" (UniqueName: \"kubernetes.io/projected/4b24e244-cb70-4924-a2bb-576aeb95cd73-kube-api-access-pfhcm\") pod \"openstack-operator-index-mpcvq\" (UID: \"4b24e244-cb70-4924-a2bb-576aeb95cd73\") " pod="openstack-operators/openstack-operator-index-mpcvq" Feb 02 17:04:49 crc kubenswrapper[4835]: I0202 17:04:49.848834 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfhcm\" (UniqueName: \"kubernetes.io/projected/4b24e244-cb70-4924-a2bb-576aeb95cd73-kube-api-access-pfhcm\") pod \"openstack-operator-index-mpcvq\" (UID: \"4b24e244-cb70-4924-a2bb-576aeb95cd73\") " pod="openstack-operators/openstack-operator-index-mpcvq" Feb 02 17:04:50 crc kubenswrapper[4835]: I0202 17:04:50.018020 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mpcvq" Feb 02 17:04:50 crc kubenswrapper[4835]: I0202 17:04:50.227478 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mpcvq"] Feb 02 17:04:50 crc kubenswrapper[4835]: I0202 17:04:50.583634 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mpcvq" event={"ID":"4b24e244-cb70-4924-a2bb-576aeb95cd73","Type":"ContainerStarted","Data":"8278ba05cf7d621f499ffcfd8c819cd34c9489777cffad47c7fdcd20bd08747c"} Feb 02 17:04:53 crc kubenswrapper[4835]: I0202 17:04:53.216249 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-mpcvq"] Feb 02 17:04:53 crc kubenswrapper[4835]: I0202 17:04:53.608986 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mpcvq" event={"ID":"4b24e244-cb70-4924-a2bb-576aeb95cd73","Type":"ContainerStarted","Data":"410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce"} Feb 02 17:04:53 crc kubenswrapper[4835]: I0202 17:04:53.626168 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-mpcvq" podStartSLOduration=2.350276339 podStartE2EDuration="4.626150099s" podCreationTimestamp="2026-02-02 17:04:49 +0000 UTC" firstStartedPulling="2026-02-02 17:04:50.244935225 +0000 UTC m=+881.866539305" lastFinishedPulling="2026-02-02 17:04:52.520808985 +0000 UTC m=+884.142413065" observedRunningTime="2026-02-02 17:04:53.623464385 +0000 UTC m=+885.245068455" watchObservedRunningTime="2026-02-02 17:04:53.626150099 +0000 UTC m=+885.247754179" Feb 02 17:04:53 crc kubenswrapper[4835]: I0202 17:04:53.822948 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-chdtk"] Feb 02 17:04:53 crc kubenswrapper[4835]: I0202 17:04:53.823792 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:04:53 crc kubenswrapper[4835]: I0202 17:04:53.832576 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-chdtk"] Feb 02 17:04:53 crc kubenswrapper[4835]: I0202 17:04:53.980857 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x94zp\" (UniqueName: \"kubernetes.io/projected/4a0bb1dd-84ba-4d22-812d-b76e81c5b054-kube-api-access-x94zp\") pod \"openstack-operator-index-chdtk\" (UID: \"4a0bb1dd-84ba-4d22-812d-b76e81c5b054\") " pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.082847 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x94zp\" (UniqueName: \"kubernetes.io/projected/4a0bb1dd-84ba-4d22-812d-b76e81c5b054-kube-api-access-x94zp\") pod \"openstack-operator-index-chdtk\" (UID: \"4a0bb1dd-84ba-4d22-812d-b76e81c5b054\") " pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.103367 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x94zp\" (UniqueName: \"kubernetes.io/projected/4a0bb1dd-84ba-4d22-812d-b76e81c5b054-kube-api-access-x94zp\") pod \"openstack-operator-index-chdtk\" (UID: \"4a0bb1dd-84ba-4d22-812d-b76e81c5b054\") " pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.140749 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.354153 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-chdtk"] Feb 02 17:04:54 crc kubenswrapper[4835]: W0202 17:04:54.363394 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a0bb1dd_84ba_4d22_812d_b76e81c5b054.slice/crio-107b01acfeaab120696df817a1129f17ca43697189e24587034f2c970faa6f2c WatchSource:0}: Error finding container 107b01acfeaab120696df817a1129f17ca43697189e24587034f2c970faa6f2c: Status 404 returned error can't find the container with id 107b01acfeaab120696df817a1129f17ca43697189e24587034f2c970faa6f2c Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.616683 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-chdtk" event={"ID":"4a0bb1dd-84ba-4d22-812d-b76e81c5b054","Type":"ContainerStarted","Data":"49c00617efebc0f0c3f24d6801027387b1769947b62230cb0bfcbcab3da0eedc"} Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.616966 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-chdtk" event={"ID":"4a0bb1dd-84ba-4d22-812d-b76e81c5b054","Type":"ContainerStarted","Data":"107b01acfeaab120696df817a1129f17ca43697189e24587034f2c970faa6f2c"} Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.617389 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-mpcvq" podUID="4b24e244-cb70-4924-a2bb-576aeb95cd73" containerName="registry-server" containerID="cri-o://410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce" gracePeriod=2 Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.635460 4835 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-chdtk" podStartSLOduration=1.57267379 podStartE2EDuration="1.635443033s" podCreationTimestamp="2026-02-02 17:04:53 +0000 UTC" firstStartedPulling="2026-02-02 17:04:54.367468955 +0000 UTC m=+885.989073025" lastFinishedPulling="2026-02-02 17:04:54.430238168 +0000 UTC m=+886.051842268" observedRunningTime="2026-02-02 17:04:54.63405171 +0000 UTC m=+886.255655790" watchObservedRunningTime="2026-02-02 17:04:54.635443033 +0000 UTC m=+886.257047113" Feb 02 17:04:54 crc kubenswrapper[4835]: I0202 17:04:54.950384 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-mpcvq" Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.097254 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfhcm\" (UniqueName: \"kubernetes.io/projected/4b24e244-cb70-4924-a2bb-576aeb95cd73-kube-api-access-pfhcm\") pod \"4b24e244-cb70-4924-a2bb-576aeb95cd73\" (UID: \"4b24e244-cb70-4924-a2bb-576aeb95cd73\") " Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.107977 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b24e244-cb70-4924-a2bb-576aeb95cd73-kube-api-access-pfhcm" (OuterVolumeSpecName: "kube-api-access-pfhcm") pod "4b24e244-cb70-4924-a2bb-576aeb95cd73" (UID: "4b24e244-cb70-4924-a2bb-576aeb95cd73"). InnerVolumeSpecName "kube-api-access-pfhcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.199773 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfhcm\" (UniqueName: \"kubernetes.io/projected/4b24e244-cb70-4924-a2bb-576aeb95cd73-kube-api-access-pfhcm\") on node \"crc\" DevicePath \"\"" Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.626311 4835 generic.go:334] "Generic (PLEG): container finished" podID="4b24e244-cb70-4924-a2bb-576aeb95cd73" containerID="410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce" exitCode=0 Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.626391 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mpcvq" Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.626419 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mpcvq" event={"ID":"4b24e244-cb70-4924-a2bb-576aeb95cd73","Type":"ContainerDied","Data":"410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce"} Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.626478 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mpcvq" event={"ID":"4b24e244-cb70-4924-a2bb-576aeb95cd73","Type":"ContainerDied","Data":"8278ba05cf7d621f499ffcfd8c819cd34c9489777cffad47c7fdcd20bd08747c"} Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.627003 4835 scope.go:117] "RemoveContainer" containerID="410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce" Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.650633 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-mpcvq"] Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.656177 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-mpcvq"] Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.657037 4835 scope.go:117] "RemoveContainer" containerID="410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce" Feb 02 17:04:55 crc kubenswrapper[4835]: E0202 17:04:55.657667 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce\": container with ID starting with 410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce not found: ID does not exist" containerID="410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce" Feb 02 17:04:55 crc kubenswrapper[4835]: I0202 17:04:55.657703 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce"} err="failed to get container status \"410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce\": rpc error: code = NotFound desc = could not find container \"410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce\": container with ID starting with 410e53725148515979594d28def67a3a93498ad7f23632b6b1129d9ccf9526ce not found: ID does not exist" Feb 02 17:04:57 crc kubenswrapper[4835]: I0202 17:04:57.196960 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b24e244-cb70-4924-a2bb-576aeb95cd73" path="/var/lib/kubelet/pods/4b24e244-cb70-4924-a2bb-576aeb95cd73/volumes" Feb 02 17:05:04 crc kubenswrapper[4835]: I0202 17:05:04.142075 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:05:04 crc kubenswrapper[4835]: I0202 17:05:04.142407 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:05:04 crc kubenswrapper[4835]: I0202 17:05:04.182811 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:05:04 crc kubenswrapper[4835]: I0202 17:05:04.729847 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-chdtk" Feb 02 17:05:06 crc 
kubenswrapper[4835]: I0202 17:05:06.472677 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd"] Feb 02 17:05:06 crc kubenswrapper[4835]: E0202 17:05:06.473776 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b24e244-cb70-4924-a2bb-576aeb95cd73" containerName="registry-server" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.473814 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b24e244-cb70-4924-a2bb-576aeb95cd73" containerName="registry-server" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.474087 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b24e244-cb70-4924-a2bb-576aeb95cd73" containerName="registry-server" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.476129 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.478136 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd"] Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.479513 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-s6pr7" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.567915 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-bundle\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.568181 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-util\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.568304 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rljd\" (UniqueName: \"kubernetes.io/projected/dbcee8eb-eae6-490a-be35-8b24fef3ed83-kube-api-access-6rljd\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.669903 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rljd\" (UniqueName: \"kubernetes.io/projected/dbcee8eb-eae6-490a-be35-8b24fef3ed83-kube-api-access-6rljd\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.669958 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-bundle\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.670011 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-util\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.670509 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-util\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.670634 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-bundle\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.689082 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rljd\" (UniqueName: \"kubernetes.io/projected/dbcee8eb-eae6-490a-be35-8b24fef3ed83-kube-api-access-6rljd\") pod \"7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:06 crc kubenswrapper[4835]: I0202 17:05:06.825286 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:07 crc kubenswrapper[4835]: I0202 17:05:07.030044 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd"] Feb 02 17:05:07 crc kubenswrapper[4835]: I0202 17:05:07.714775 4835 generic.go:334] "Generic (PLEG): container finished" podID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerID="50ddbdac6a031935c0b095967d2b194933b0d68dcace40bfeb6d534419ff387e" exitCode=0 Feb 02 17:05:07 crc kubenswrapper[4835]: I0202 17:05:07.714887 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" event={"ID":"dbcee8eb-eae6-490a-be35-8b24fef3ed83","Type":"ContainerDied","Data":"50ddbdac6a031935c0b095967d2b194933b0d68dcace40bfeb6d534419ff387e"} Feb 02 17:05:07 crc kubenswrapper[4835]: I0202 17:05:07.715114 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" event={"ID":"dbcee8eb-eae6-490a-be35-8b24fef3ed83","Type":"ContainerStarted","Data":"18755c6f98bed30e1b5c0932381bad5825fe24160329cdb16b9b3fb5cafa66f9"} Feb 02 17:05:08 crc kubenswrapper[4835]: I0202 17:05:08.731081 4835 generic.go:334] "Generic (PLEG): container finished" podID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerID="10a61b09ba1fd13bd6d8984f2950cccd1009c3bd3b674449a8acba0bbd8c2a1a" exitCode=0 Feb 02 17:05:08 crc kubenswrapper[4835]: I0202 17:05:08.731143 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" event={"ID":"dbcee8eb-eae6-490a-be35-8b24fef3ed83","Type":"ContainerDied","Data":"10a61b09ba1fd13bd6d8984f2950cccd1009c3bd3b674449a8acba0bbd8c2a1a"} Feb 02 17:05:09 crc kubenswrapper[4835]: I0202 17:05:09.740369 4835 generic.go:334] "Generic (PLEG): container finished" podID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerID="82fb6b8a001e90c34ce32dda5ee1b1d0332edbb5398624e58e48887ab9adc735" exitCode=0 Feb 02 17:05:09 crc kubenswrapper[4835]: I0202 17:05:09.740437 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" event={"ID":"dbcee8eb-eae6-490a-be35-8b24fef3ed83","Type":"ContainerDied","Data":"82fb6b8a001e90c34ce32dda5ee1b1d0332edbb5398624e58e48887ab9adc735"} Feb 02 17:05:10 crc kubenswrapper[4835]: I0202 17:05:10.976799 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.029685 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-util\") pod \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.029751 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-bundle\") pod \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.029783 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rljd\" (UniqueName: \"kubernetes.io/projected/dbcee8eb-eae6-490a-be35-8b24fef3ed83-kube-api-access-6rljd\") pod \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\" (UID: \"dbcee8eb-eae6-490a-be35-8b24fef3ed83\") " Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.030680 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-bundle" (OuterVolumeSpecName: "bundle") pod "dbcee8eb-eae6-490a-be35-8b24fef3ed83" (UID: "dbcee8eb-eae6-490a-be35-8b24fef3ed83"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.035978 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbcee8eb-eae6-490a-be35-8b24fef3ed83-kube-api-access-6rljd" (OuterVolumeSpecName: "kube-api-access-6rljd") pod "dbcee8eb-eae6-490a-be35-8b24fef3ed83" (UID: "dbcee8eb-eae6-490a-be35-8b24fef3ed83"). InnerVolumeSpecName "kube-api-access-6rljd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.043234 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-util" (OuterVolumeSpecName: "util") pod "dbcee8eb-eae6-490a-be35-8b24fef3ed83" (UID: "dbcee8eb-eae6-490a-be35-8b24fef3ed83"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.131658 4835 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-util\") on node \"crc\" DevicePath \"\"" Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.131725 4835 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dbcee8eb-eae6-490a-be35-8b24fef3ed83-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.131737 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rljd\" (UniqueName: \"kubernetes.io/projected/dbcee8eb-eae6-490a-be35-8b24fef3ed83-kube-api-access-6rljd\") on node \"crc\" DevicePath \"\"" Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.753984 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" event={"ID":"dbcee8eb-eae6-490a-be35-8b24fef3ed83","Type":"ContainerDied","Data":"18755c6f98bed30e1b5c0932381bad5825fe24160329cdb16b9b3fb5cafa66f9"} Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.754030 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18755c6f98bed30e1b5c0932381bad5825fe24160329cdb16b9b3fb5cafa66f9" Feb 02 17:05:11 crc kubenswrapper[4835]: I0202 17:05:11.754035 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.162382 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf"] Feb 02 17:05:19 crc kubenswrapper[4835]: E0202 17:05:19.163204 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerName="util" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.163221 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerName="util" Feb 02 17:05:19 crc kubenswrapper[4835]: E0202 17:05:19.163245 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerName="extract" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.163252 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerName="extract" Feb 02 17:05:19 crc kubenswrapper[4835]: E0202 17:05:19.163282 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerName="pull" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.163291 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerName="pull" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.163432 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbcee8eb-eae6-490a-be35-8b24fef3ed83" containerName="extract" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.164121 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.166411 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-kxksd" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.197589 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf"] Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.242150 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqhlc\" (UniqueName: \"kubernetes.io/projected/2f7d609f-2d42-4252-912a-ccae13d46f7f-kube-api-access-rqhlc\") pod \"openstack-operator-controller-init-6d857fbf88-k28lf\" (UID: \"2f7d609f-2d42-4252-912a-ccae13d46f7f\") " pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.343110 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqhlc\" (UniqueName: \"kubernetes.io/projected/2f7d609f-2d42-4252-912a-ccae13d46f7f-kube-api-access-rqhlc\") pod \"openstack-operator-controller-init-6d857fbf88-k28lf\" (UID: \"2f7d609f-2d42-4252-912a-ccae13d46f7f\") " pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.369446 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqhlc\" (UniqueName: \"kubernetes.io/projected/2f7d609f-2d42-4252-912a-ccae13d46f7f-kube-api-access-rqhlc\") pod \"openstack-operator-controller-init-6d857fbf88-k28lf\" (UID: \"2f7d609f-2d42-4252-912a-ccae13d46f7f\") " pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.482060 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" Feb 02 17:05:19 crc kubenswrapper[4835]: I0202 17:05:19.943436 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf"] Feb 02 17:05:20 crc kubenswrapper[4835]: I0202 17:05:20.820979 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" event={"ID":"2f7d609f-2d42-4252-912a-ccae13d46f7f","Type":"ContainerStarted","Data":"737b0e769812c1abcc008ea8fd9aae7bd15d909ae0cccdc7009a30dff18830e8"} Feb 02 17:05:23 crc kubenswrapper[4835]: I0202 17:05:23.840340 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" event={"ID":"2f7d609f-2d42-4252-912a-ccae13d46f7f","Type":"ContainerStarted","Data":"7fe2a89ea09226d61f06b0b4068eb85cac42ce529eb600b66be7bf7d552e53fc"} Feb 02 17:05:24 crc kubenswrapper[4835]: I0202 17:05:24.847338 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" Feb 02 17:05:24 crc kubenswrapper[4835]: I0202 17:05:24.882682 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" podStartSLOduration=2.225564226 podStartE2EDuration="5.882659965s" podCreationTimestamp="2026-02-02 17:05:19 +0000 UTC" firstStartedPulling="2026-02-02 17:05:19.933977715 +0000 UTC m=+911.555581795" lastFinishedPulling="2026-02-02 17:05:23.591073454 +0000 UTC m=+915.212677534" observedRunningTime="2026-02-02 17:05:24.882385497 +0000 UTC m=+916.503989597" watchObservedRunningTime="2026-02-02 17:05:24.882659965 +0000 UTC m=+916.504264065" Feb 02 17:05:29 crc kubenswrapper[4835]: I0202 17:05:29.485562 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-6d857fbf88-k28lf" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.653608 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.655089 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.657429 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-dvx25" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.662928 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.667890 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.668623 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.674552 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-ggxnr" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.679594 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.680297 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.682088 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-lw2ff" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.712613 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kfx8\" (UniqueName: \"kubernetes.io/projected/867a5e63-f2c8-45fe-a65a-a8c3d11de2b3-kube-api-access-7kfx8\") pod \"cinder-operator-controller-manager-5968f45b79-bhwd9\" (UID: \"867a5e63-f2c8-45fe-a65a-a8c3d11de2b3\") " pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.712683 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mhfn\" (UniqueName: \"kubernetes.io/projected/2fa52615-07a4-47bc-8a7c-62565638964e-kube-api-access-8mhfn\") pod \"designate-operator-controller-manager-8f4c5cb64-2smkw\" (UID: \"2fa52615-07a4-47bc-8a7c-62565638964e\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.712769 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zxhr\" (UniqueName: \"kubernetes.io/projected/ccf3b51e-9298-4a5e-ad19-feac0a171056-kube-api-access-7zxhr\") pod \"barbican-operator-controller-manager-fc589b45f-wlvlw\" (UID: \"ccf3b51e-9298-4a5e-ad19-feac0a171056\") " pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.712867 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.732045 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.737971 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.738950 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.744097 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-qjpv6" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.746918 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.747905 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.750624 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-k4f97" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.753451 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.764707 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.772334 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.773097 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.778539 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-nvslz" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.786462 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.787337 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.791433 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.791764 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-clcx4" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.817331 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.817824 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.817874 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zxhr\" (UniqueName: \"kubernetes.io/projected/ccf3b51e-9298-4a5e-ad19-feac0a171056-kube-api-access-7zxhr\") pod \"barbican-operator-controller-manager-fc589b45f-wlvlw\" (UID: \"ccf3b51e-9298-4a5e-ad19-feac0a171056\") " pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.817917 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg9wc\" (UniqueName: \"kubernetes.io/projected/051510bb-9754-4866-932d-53e8f209af3e-kube-api-access-cg9wc\") pod \"glance-operator-controller-manager-5d77f4dbc9-l2d9w\" (UID: \"051510bb-9754-4866-932d-53e8f209af3e\") " pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.817951 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhl44\" (UniqueName: \"kubernetes.io/projected/bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf-kube-api-access-lhl44\") pod \"horizon-operator-controller-manager-5fb775575f-dpqkl\" (UID: \"bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.817995 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mhfn\" (UniqueName: \"kubernetes.io/projected/2fa52615-07a4-47bc-8a7c-62565638964e-kube-api-access-8mhfn\") pod \"designate-operator-controller-manager-8f4c5cb64-2smkw\" (UID: \"2fa52615-07a4-47bc-8a7c-62565638964e\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.818020 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kfx8\" (UniqueName: \"kubernetes.io/projected/867a5e63-f2c8-45fe-a65a-a8c3d11de2b3-kube-api-access-7kfx8\") pod \"cinder-operator-controller-manager-5968f45b79-bhwd9\" (UID: \"867a5e63-f2c8-45fe-a65a-a8c3d11de2b3\") " pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.818063 4835 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjvrb\" (UniqueName: \"kubernetes.io/projected/8d738981-de82-4d01-a295-b14401942841-kube-api-access-pjvrb\") pod \"heat-operator-controller-manager-65dc6c8d9c-sf4fj\" (UID: \"8d738981-de82-4d01-a295-b14401942841\") " pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.818098 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sx7m\" (UniqueName: \"kubernetes.io/projected/cb34a8e8-0047-450d-898b-56164cd6f8c3-kube-api-access-9sx7m\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.818182 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.821722 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-mfrcb" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.834055 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.843919 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.844718 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.846366 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mhfn\" (UniqueName: \"kubernetes.io/projected/2fa52615-07a4-47bc-8a7c-62565638964e-kube-api-access-8mhfn\") pod \"designate-operator-controller-manager-8f4c5cb64-2smkw\" (UID: \"2fa52615-07a4-47bc-8a7c-62565638964e\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.847121 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-cvjp5" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.849294 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.849499 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zxhr\" (UniqueName: \"kubernetes.io/projected/ccf3b51e-9298-4a5e-ad19-feac0a171056-kube-api-access-7zxhr\") pod \"barbican-operator-controller-manager-fc589b45f-wlvlw\" (UID: \"ccf3b51e-9298-4a5e-ad19-feac0a171056\") " pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.850517 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kfx8\" (UniqueName: \"kubernetes.io/projected/867a5e63-f2c8-45fe-a65a-a8c3d11de2b3-kube-api-access-7kfx8\") pod \"cinder-operator-controller-manager-5968f45b79-bhwd9\" (UID: \"867a5e63-f2c8-45fe-a65a-a8c3d11de2b3\") " pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.854212 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.863773 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.864859 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.868030 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-dsg26" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.893622 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.898696 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.918340 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.919454 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.920227 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg9wc\" (UniqueName: \"kubernetes.io/projected/051510bb-9754-4866-932d-53e8f209af3e-kube-api-access-cg9wc\") pod \"glance-operator-controller-manager-5d77f4dbc9-l2d9w\" (UID: \"051510bb-9754-4866-932d-53e8f209af3e\") " pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.920288 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhl44\" (UniqueName: \"kubernetes.io/projected/bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf-kube-api-access-lhl44\") pod \"horizon-operator-controller-manager-5fb775575f-dpqkl\" (UID: \"bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.920326 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db9rj\" (UniqueName: \"kubernetes.io/projected/60282c99-48f4-4c72-92d2-c92b6720bcf7-kube-api-access-db9rj\") pod \"manila-operator-controller-manager-7775d87d9d-knb4g\" (UID: \"60282c99-48f4-4c72-92d2-c92b6720bcf7\") " pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.920361 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjvrb\" (UniqueName: \"kubernetes.io/projected/8d738981-de82-4d01-a295-b14401942841-kube-api-access-pjvrb\") pod \"heat-operator-controller-manager-65dc6c8d9c-sf4fj\" (UID: \"8d738981-de82-4d01-a295-b14401942841\") " pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.920378 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86mt5\" (UniqueName: \"kubernetes.io/projected/487c0b98-8b52-47fd-84ff-6637b6d79c8c-kube-api-access-86mt5\") pod \"keystone-operator-controller-manager-64469b487f-xdk9w\" (UID: \"487c0b98-8b52-47fd-84ff-6637b6d79c8c\") " pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.920400 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv9hf\" (UniqueName: \"kubernetes.io/projected/668fc23c-0c08-4f7e-839d-6fbcf5f6554d-kube-api-access-zv9hf\") pod \"ironic-operator-controller-manager-87bd9d46f-5bvq9\" (UID: \"668fc23c-0c08-4f7e-839d-6fbcf5f6554d\") " pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.920420 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sx7m\" (UniqueName: \"kubernetes.io/projected/cb34a8e8-0047-450d-898b-56164cd6f8c3-kube-api-access-9sx7m\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.920447 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:46 crc kubenswrapper[4835]: E0202 17:05:46.920575 4835 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:46 crc kubenswrapper[4835]: E0202 17:05:46.920623 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert podName:cb34a8e8-0047-450d-898b-56164cd6f8c3 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:47.420605782 +0000 UTC m=+939.042209862 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert") pod "infra-operator-controller-manager-79955696d6-xx8fb" (UID: "cb34a8e8-0047-450d-898b-56164cd6f8c3") : secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.925264 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-tnlnr" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.945824 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.946789 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.950425 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-j7tjt" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.961160 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg9wc\" (UniqueName: \"kubernetes.io/projected/051510bb-9754-4866-932d-53e8f209af3e-kube-api-access-cg9wc\") pod \"glance-operator-controller-manager-5d77f4dbc9-l2d9w\" (UID: \"051510bb-9754-4866-932d-53e8f209af3e\") " pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.961185 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhl44\" (UniqueName: \"kubernetes.io/projected/bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf-kube-api-access-lhl44\") pod \"horizon-operator-controller-manager-5fb775575f-dpqkl\" (UID: \"bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.961202 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sx7m\" (UniqueName: \"kubernetes.io/projected/cb34a8e8-0047-450d-898b-56164cd6f8c3-kube-api-access-9sx7m\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.961309 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjvrb\" (UniqueName: 
\"kubernetes.io/projected/8d738981-de82-4d01-a295-b14401942841-kube-api-access-pjvrb\") pod \"heat-operator-controller-manager-65dc6c8d9c-sf4fj\" (UID: \"8d738981-de82-4d01-a295-b14401942841\") " pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.964313 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5644b66645-89lbp"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.965147 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.967095 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-vpwgj" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.972315 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.977358 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.982818 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.987114 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5644b66645-89lbp"] Feb 02 17:05:46 crc kubenswrapper[4835]: I0202 17:05:46.998742 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.010323 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.020903 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.022072 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db9rj\" (UniqueName: \"kubernetes.io/projected/60282c99-48f4-4c72-92d2-c92b6720bcf7-kube-api-access-db9rj\") pod \"manila-operator-controller-manager-7775d87d9d-knb4g\" (UID: \"60282c99-48f4-4c72-92d2-c92b6720bcf7\") " pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.022130 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86mt5\" (UniqueName: \"kubernetes.io/projected/487c0b98-8b52-47fd-84ff-6637b6d79c8c-kube-api-access-86mt5\") pod \"keystone-operator-controller-manager-64469b487f-xdk9w\" (UID: \"487c0b98-8b52-47fd-84ff-6637b6d79c8c\") " pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.022188 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f95j\" (UniqueName: \"kubernetes.io/projected/a2b75f19-bcbe-4f09-9652-70f042d4bc29-kube-api-access-2f95j\") pod \"neutron-operator-controller-manager-576995988b-pd7lc\" (UID: \"a2b75f19-bcbe-4f09-9652-70f042d4bc29\") " pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.022237 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv9hf\" (UniqueName: \"kubernetes.io/projected/668fc23c-0c08-4f7e-839d-6fbcf5f6554d-kube-api-access-zv9hf\") pod \"ironic-operator-controller-manager-87bd9d46f-5bvq9\" (UID: \"668fc23c-0c08-4f7e-839d-6fbcf5f6554d\") " pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.022314 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfgpg\" (UniqueName: \"kubernetes.io/projected/9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2-kube-api-access-hfgpg\") pod \"mariadb-operator-controller-manager-67bf948998-mt62w\" (UID: \"9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.022338 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc8qb\" (UniqueName: \"kubernetes.io/projected/ac2c47dc-967c-456e-affc-bb3c4ac5b6d0-kube-api-access-sc8qb\") pod \"nova-operator-controller-manager-5644b66645-89lbp\" (UID: \"ac2c47dc-967c-456e-affc-bb3c4ac5b6d0\") " pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.026282 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.035526 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-24sj5" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.064672 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.066315 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86mt5\" (UniqueName: \"kubernetes.io/projected/487c0b98-8b52-47fd-84ff-6637b6d79c8c-kube-api-access-86mt5\") pod \"keystone-operator-controller-manager-64469b487f-xdk9w\" (UID: \"487c0b98-8b52-47fd-84ff-6637b6d79c8c\") " pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.070882 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv9hf\" (UniqueName: \"kubernetes.io/projected/668fc23c-0c08-4f7e-839d-6fbcf5f6554d-kube-api-access-zv9hf\") pod \"ironic-operator-controller-manager-87bd9d46f-5bvq9\" (UID: \"668fc23c-0c08-4f7e-839d-6fbcf5f6554d\") " pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.075186 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.077262 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db9rj\" (UniqueName: \"kubernetes.io/projected/60282c99-48f4-4c72-92d2-c92b6720bcf7-kube-api-access-db9rj\") pod \"manila-operator-controller-manager-7775d87d9d-knb4g\" (UID: \"60282c99-48f4-4c72-92d2-c92b6720bcf7\") " pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.080033 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.108108 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.108930 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.125133 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.125722 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-xmhvd" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.126157 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.129455 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.136879 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.137954 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.138577 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.138823 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-gp9wg" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.139200 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc8qb\" (UniqueName: \"kubernetes.io/projected/ac2c47dc-967c-456e-affc-bb3c4ac5b6d0-kube-api-access-sc8qb\") pod \"nova-operator-controller-manager-5644b66645-89lbp\" (UID: \"ac2c47dc-967c-456e-affc-bb3c4ac5b6d0\") " pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.139283 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f95j\" (UniqueName: \"kubernetes.io/projected/a2b75f19-bcbe-4f09-9652-70f042d4bc29-kube-api-access-2f95j\") pod \"neutron-operator-controller-manager-576995988b-pd7lc\" (UID: \"a2b75f19-bcbe-4f09-9652-70f042d4bc29\") " pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.139311 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nr9w\" (UniqueName: \"kubernetes.io/projected/c64313f5-c2dc-4a80-aee6-4c177172598f-kube-api-access-9nr9w\") pod \"octavia-operator-controller-manager-7b89ddb58-vl7xb\" (UID: \"c64313f5-c2dc-4a80-aee6-4c177172598f\") " pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.139337 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmlks\" (UniqueName: \"kubernetes.io/projected/3b504454-3ebc-45b8-8e93-fcab1363ce3c-kube-api-access-wmlks\") pod \"ovn-operator-controller-manager-788c46999f-ccmdf\" (UID: \"3b504454-3ebc-45b8-8e93-fcab1363ce3c\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.139379 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfgpg\" (UniqueName: \"kubernetes.io/projected/9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2-kube-api-access-hfgpg\") pod \"mariadb-operator-controller-manager-67bf948998-mt62w\" (UID: \"9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.145502 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-84r97" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.156442 4835 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.158775 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.161921 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-7cvwf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.166963 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfgpg\" (UniqueName: \"kubernetes.io/projected/9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2-kube-api-access-hfgpg\") pod \"mariadb-operator-controller-manager-67bf948998-mt62w\" (UID: \"9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.183860 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.183916 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.189619 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f95j\" (UniqueName: \"kubernetes.io/projected/a2b75f19-bcbe-4f09-9652-70f042d4bc29-kube-api-access-2f95j\") pod \"neutron-operator-controller-manager-576995988b-pd7lc\" (UID: \"a2b75f19-bcbe-4f09-9652-70f042d4bc29\") " pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.190107 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc8qb\" (UniqueName: \"kubernetes.io/projected/ac2c47dc-967c-456e-affc-bb3c4ac5b6d0-kube-api-access-sc8qb\") pod \"nova-operator-controller-manager-5644b66645-89lbp\" (UID: \"ac2c47dc-967c-456e-affc-bb3c4ac5b6d0\") " pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.211186 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.228042 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.241759 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hvpr\" (UniqueName: \"kubernetes.io/projected/af6dd5c2-faa4-407d-b6bc-fffda146240b-kube-api-access-2hvpr\") pod \"placement-operator-controller-manager-5b964cf4cd-kjlbl\" (UID: \"af6dd5c2-faa4-407d-b6bc-fffda146240b\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.241845 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nr9w\" (UniqueName: \"kubernetes.io/projected/c64313f5-c2dc-4a80-aee6-4c177172598f-kube-api-access-9nr9w\") pod \"octavia-operator-controller-manager-7b89ddb58-vl7xb\" (UID: \"c64313f5-c2dc-4a80-aee6-4c177172598f\") " pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.241886 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5vjj\" (UniqueName: \"kubernetes.io/projected/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-kube-api-access-g5vjj\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.241925 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmlks\" (UniqueName: \"kubernetes.io/projected/3b504454-3ebc-45b8-8e93-fcab1363ce3c-kube-api-access-wmlks\") pod \"ovn-operator-controller-manager-788c46999f-ccmdf\" (UID: \"3b504454-3ebc-45b8-8e93-fcab1363ce3c\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.242060 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.242141 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sztct\" (UniqueName: \"kubernetes.io/projected/563720e1-311a-4aea-b34b-e6ab1d5d7f44-kube-api-access-sztct\") pod \"swift-operator-controller-manager-7b89fdf75b-lrq25\" (UID: \"563720e1-311a-4aea-b34b-e6ab1d5d7f44\") " pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.251307 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.272666 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmlks\" (UniqueName: \"kubernetes.io/projected/3b504454-3ebc-45b8-8e93-fcab1363ce3c-kube-api-access-wmlks\") pod \"ovn-operator-controller-manager-788c46999f-ccmdf\" (UID: \"3b504454-3ebc-45b8-8e93-fcab1363ce3c\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.274569 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.274637 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.274662 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.275943 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.275968 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.277490 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.283813 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nr9w\" (UniqueName: \"kubernetes.io/projected/c64313f5-c2dc-4a80-aee6-4c177172598f-kube-api-access-9nr9w\") pod \"octavia-operator-controller-manager-7b89ddb58-vl7xb\" (UID: \"c64313f5-c2dc-4a80-aee6-4c177172598f\") " pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.285224 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.285266 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.285396 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.285867 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.285940 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.287141 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-8r6dj" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.287464 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-9gbpb" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.288558 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-6r6gv" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.303581 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.343698 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.344292 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx4n5\" (UniqueName: \"kubernetes.io/projected/affef4c6-1369-40e7-882d-e0cc06c7a492-kube-api-access-dx4n5\") pod \"telemetry-operator-controller-manager-565849b54-r2xwk\" (UID: \"affef4c6-1369-40e7-882d-e0cc06c7a492\") " pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.344362 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbmzp\" (UniqueName: \"kubernetes.io/projected/eb7ea012-63e3-4108-bb3b-904fd21a7c4c-kube-api-access-zbmzp\") pod \"test-operator-controller-manager-56f8bfcd9f-2klm7\" (UID: \"eb7ea012-63e3-4108-bb3b-904fd21a7c4c\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.344413 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sztct\" (UniqueName: \"kubernetes.io/projected/563720e1-311a-4aea-b34b-e6ab1d5d7f44-kube-api-access-sztct\") pod \"swift-operator-controller-manager-7b89fdf75b-lrq25\" (UID: \"563720e1-311a-4aea-b34b-e6ab1d5d7f44\") " pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.343883 4835 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.344742 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert podName:eee7ce8b-cbaf-48ff-80d8-92011b4a11fa nodeName:}" failed. No retries permitted until 2026-02-02 17:05:47.844719794 +0000 UTC m=+939.466323874 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" (UID: "eee7ce8b-cbaf-48ff-80d8-92011b4a11fa") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.344773 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hvpr\" (UniqueName: \"kubernetes.io/projected/af6dd5c2-faa4-407d-b6bc-fffda146240b-kube-api-access-2hvpr\") pod \"placement-operator-controller-manager-5b964cf4cd-kjlbl\" (UID: \"af6dd5c2-faa4-407d-b6bc-fffda146240b\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.344821 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5vjj\" (UniqueName: \"kubernetes.io/projected/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-kube-api-access-g5vjj\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.344894 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjhr4\" (UniqueName: \"kubernetes.io/projected/83fe7277-43df-4e53-b2e1-20ec1c340289-kube-api-access-fjhr4\") pod \"watcher-operator-controller-manager-586b95b788-rrg8c\" (UID: \"83fe7277-43df-4e53-b2e1-20ec1c340289\") " pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.343902 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.367819 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hvpr\" (UniqueName: \"kubernetes.io/projected/af6dd5c2-faa4-407d-b6bc-fffda146240b-kube-api-access-2hvpr\") pod \"placement-operator-controller-manager-5b964cf4cd-kjlbl\" (UID: \"af6dd5c2-faa4-407d-b6bc-fffda146240b\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.371493 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5vjj\" (UniqueName: \"kubernetes.io/projected/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-kube-api-access-g5vjj\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.371879 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.375131 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sztct\" (UniqueName: \"kubernetes.io/projected/563720e1-311a-4aea-b34b-e6ab1d5d7f44-kube-api-access-sztct\") pod \"swift-operator-controller-manager-7b89fdf75b-lrq25\" (UID: \"563720e1-311a-4aea-b34b-e6ab1d5d7f44\") " pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.410225 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.411480 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.414103 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-7w54s" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.414375 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.414958 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.424162 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.429348 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.446016 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scbbk\" (UniqueName: \"kubernetes.io/projected/5166e3f9-91d3-4a6a-a4af-68e5063aa217-kube-api-access-scbbk\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.446119 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbmzp\" (UniqueName: \"kubernetes.io/projected/eb7ea012-63e3-4108-bb3b-904fd21a7c4c-kube-api-access-zbmzp\") pod \"test-operator-controller-manager-56f8bfcd9f-2klm7\" (UID: \"eb7ea012-63e3-4108-bb3b-904fd21a7c4c\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.446175 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.446236 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.446366 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.446414 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjhr4\" (UniqueName: \"kubernetes.io/projected/83fe7277-43df-4e53-b2e1-20ec1c340289-kube-api-access-fjhr4\") pod \"watcher-operator-controller-manager-586b95b788-rrg8c\" (UID: \"83fe7277-43df-4e53-b2e1-20ec1c340289\") " pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.446477 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx4n5\" (UniqueName: \"kubernetes.io/projected/affef4c6-1369-40e7-882d-e0cc06c7a492-kube-api-access-dx4n5\") pod \"telemetry-operator-controller-manager-565849b54-r2xwk\" (UID: \"affef4c6-1369-40e7-882d-e0cc06c7a492\") " pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.446868 4835 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.446914 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert podName:cb34a8e8-0047-450d-898b-56164cd6f8c3 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:48.446896648 +0000 UTC m=+940.068500728 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert") pod "infra-operator-controller-manager-79955696d6-xx8fb" (UID: "cb34a8e8-0047-450d-898b-56164cd6f8c3") : secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.469262 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjhr4\" (UniqueName: \"kubernetes.io/projected/83fe7277-43df-4e53-b2e1-20ec1c340289-kube-api-access-fjhr4\") pod \"watcher-operator-controller-manager-586b95b788-rrg8c\" (UID: \"83fe7277-43df-4e53-b2e1-20ec1c340289\") " pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.473085 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx4n5\" (UniqueName: \"kubernetes.io/projected/affef4c6-1369-40e7-882d-e0cc06c7a492-kube-api-access-dx4n5\") pod \"telemetry-operator-controller-manager-565849b54-r2xwk\" (UID: \"affef4c6-1369-40e7-882d-e0cc06c7a492\") " pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.475377 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbmzp\" (UniqueName: \"kubernetes.io/projected/eb7ea012-63e3-4108-bb3b-904fd21a7c4c-kube-api-access-zbmzp\") pod \"test-operator-controller-manager-56f8bfcd9f-2klm7\" (UID: \"eb7ea012-63e3-4108-bb3b-904fd21a7c4c\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.492529 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.496528 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.497836 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.499544 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-4jgt5" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.511833 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.537654 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.548824 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scbbk\" (UniqueName: \"kubernetes.io/projected/5166e3f9-91d3-4a6a-a4af-68e5063aa217-kube-api-access-scbbk\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.548902 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlktq\" (UniqueName: \"kubernetes.io/projected/dc9b2536-2284-4bd8-b803-e6dc90e30016-kube-api-access-nlktq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wdspf\" (UID: \"dc9b2536-2284-4bd8-b803-e6dc90e30016\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.549018 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.549186 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.549811 4835 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.549909 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:48.049892715 +0000 UTC m=+939.671496785 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "metrics-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.550715 4835 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.550762 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:48.050749949 +0000 UTC m=+939.672354019 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "webhook-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.553980 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.596770 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.615105 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scbbk\" (UniqueName: \"kubernetes.io/projected/5166e3f9-91d3-4a6a-a4af-68e5063aa217-kube-api-access-scbbk\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.637916 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.652460 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlktq\" (UniqueName: \"kubernetes.io/projected/dc9b2536-2284-4bd8-b803-e6dc90e30016-kube-api-access-nlktq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wdspf\" (UID: \"dc9b2536-2284-4bd8-b803-e6dc90e30016\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" Feb 02 17:05:47 crc kubenswrapper[4835]: W0202 17:05:47.658668 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccf3b51e_9298_4a5e_ad19_feac0a171056.slice/crio-15ae7f1c71f40482c5e62317cd13a4c432f1433349a1f0e1e2c2f6b695d46f4b WatchSource:0}: Error finding container 15ae7f1c71f40482c5e62317cd13a4c432f1433349a1f0e1e2c2f6b695d46f4b: Status 404 returned error can't find the container with id 15ae7f1c71f40482c5e62317cd13a4c432f1433349a1f0e1e2c2f6b695d46f4b Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.669641 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.678698 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlktq\" (UniqueName: \"kubernetes.io/projected/dc9b2536-2284-4bd8-b803-e6dc90e30016-kube-api-access-nlktq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wdspf\" (UID: \"dc9b2536-2284-4bd8-b803-e6dc90e30016\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.679018 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.765646 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.788883 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9"] Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.842762 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.858065 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.858264 4835 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: E0202 17:05:47.858343 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert podName:eee7ce8b-cbaf-48ff-80d8-92011b4a11fa nodeName:}" failed. No retries permitted until 2026-02-02 17:05:48.858325831 +0000 UTC m=+940.479929911 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" (UID: "eee7ce8b-cbaf-48ff-80d8-92011b4a11fa") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:47 crc kubenswrapper[4835]: I0202 17:05:47.998336 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" event={"ID":"ccf3b51e-9298-4a5e-ad19-feac0a171056","Type":"ContainerStarted","Data":"15ae7f1c71f40482c5e62317cd13a4c432f1433349a1f0e1e2c2f6b695d46f4b"} Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.000788 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" event={"ID":"867a5e63-f2c8-45fe-a65a-a8c3d11de2b3","Type":"ContainerStarted","Data":"a216e33d16a29c2e65f997b1a73a811ea6e234938c8ad1a4515aee141d0a7fc8"} Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.009868 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" event={"ID":"2fa52615-07a4-47bc-8a7c-62565638964e","Type":"ContainerStarted","Data":"e220f902432294760684e1af67d133c7c89715e03f0be53dfa54f5fd62fe8ed9"} Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.076445 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " 
pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.076617 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.076762 4835 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.076810 4835 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.076823 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:49.076805049 +0000 UTC m=+940.698409129 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "webhook-server-cert" not found Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.076894 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:49.076871941 +0000 UTC m=+940.698476021 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "metrics-server-cert" not found Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.162129 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj"] Feb 02 17:05:48 crc kubenswrapper[4835]: W0202 17:05:48.167999 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d738981_de82_4d01_a295_b14401942841.slice/crio-f6bd7d899dac01c9d81047fa1fb3d63879f6ea0d5d36088e586edfe05b874d9e WatchSource:0}: Error finding container f6bd7d899dac01c9d81047fa1fb3d63879f6ea0d5d36088e586edfe05b874d9e: Status 404 returned error can't find the container with id f6bd7d899dac01c9d81047fa1fb3d63879f6ea0d5d36088e586edfe05b874d9e Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.197405 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.481906 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.482068 4835 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.482152 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert podName:cb34a8e8-0047-450d-898b-56164cd6f8c3 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:50.482129609 +0000 UTC m=+942.103733719 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert") pod "infra-operator-controller-manager-79955696d6-xx8fb" (UID: "cb34a8e8-0047-450d-898b-56164cd6f8c3") : secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.558539 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.564477 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.597446 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9"] Feb 02 17:05:48 crc kubenswrapper[4835]: W0202 17:05:48.603434 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaffef4c6_1369_40e7_882d_e0cc06c7a492.slice/crio-62fff7138259b8ffa5c15a13c2dc332b95483943d062a0fdd5e8e4aaa9e7f9d4 WatchSource:0}: Error finding container 62fff7138259b8ffa5c15a13c2dc332b95483943d062a0fdd5e8e4aaa9e7f9d4: Status 404 returned error can't find the container with id 62fff7138259b8ffa5c15a13c2dc332b95483943d062a0fdd5e8e4aaa9e7f9d4 Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.613887 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.618914 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.626095 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.636432 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.640124 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.665422 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5644b66645-89lbp"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.680488 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.686353 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.691400 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.720558 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c"] Feb 02 17:05:48 crc kubenswrapper[4835]: W0202 17:05:48.727869 4835 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac2c47dc_967c_456e_affc_bb3c4ac5b6d0.slice/crio-245c396b4b7c3dcb653f638408ecf69a29597926ec6719856eec51ca3a79b095 WatchSource:0}: Error finding container 245c396b4b7c3dcb653f638408ecf69a29597926ec6719856eec51ca3a79b095: Status 404 returned error can't find the container with id 245c396b4b7c3dcb653f638408ecf69a29597926ec6719856eec51ca3a79b095 Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.746666 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2hvpr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5b964cf4cd-kjlbl_openstack-operators(af6dd5c2-faa4-407d-b6bc-fffda146240b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.748072 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" podUID="af6dd5c2-faa4-407d-b6bc-fffda146240b" Feb 02 17:05:48 crc kubenswrapper[4835]: W0202 17:05:48.759437 4835 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83fe7277_43df_4e53_b2e1_20ec1c340289.slice/crio-0beacdfe4b7b0f58a8086ee6b9ad62c6edd7f75907a9f0ca7e950243e02d6a77 WatchSource:0}: Error finding container 0beacdfe4b7b0f58a8086ee6b9ad62c6edd7f75907a9f0ca7e950243e02d6a77: Status 404 returned error can't find the container with id 0beacdfe4b7b0f58a8086ee6b9ad62c6edd7f75907a9f0ca7e950243e02d6a77 Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.762181 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/nova-operator@sha256:6b951a651861f6e805ceec19cad5a35a8dfe6fd9536acebd3c197ca4659d8a51,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sc8qb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-5644b66645-89lbp_openstack-operators(ac2c47dc-967c-456e-affc-bb3c4ac5b6d0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.766050 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" podUID="ac2c47dc-967c-456e-affc-bb3c4ac5b6d0" Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.787762 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/lmiccini/watcher-operator@sha256:3fd1f7623a4b32505f51f329116f7e13bb4cfd320e920961a5b86441a89326d6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fjhr4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-586b95b788-rrg8c_openstack-operators(83fe7277-43df-4e53-b2e1-20ec1c340289): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.789493 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" podUID="83fe7277-43df-4e53-b2e1-20ec1c340289" Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.905585 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7"] Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.906138 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.906361 4835 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: 
secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.906408 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert podName:eee7ce8b-cbaf-48ff-80d8-92011b4a11fa nodeName:}" failed. No retries permitted until 2026-02-02 17:05:50.906394256 +0000 UTC m=+942.527998336 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" (UID: "eee7ce8b-cbaf-48ff-80d8-92011b4a11fa") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:48 crc kubenswrapper[4835]: I0202 17:05:48.911537 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf"] Feb 02 17:05:48 crc kubenswrapper[4835]: W0202 17:05:48.924215 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb7ea012_63e3_4108_bb3b_904fd21a7c4c.slice/crio-a6ee21a6f1980ac51c4f9b1ba917967cd9a56e829068fdac5741196264a6a14d WatchSource:0}: Error finding container a6ee21a6f1980ac51c4f9b1ba917967cd9a56e829068fdac5741196264a6a14d: Status 404 returned error can't find the container with id a6ee21a6f1980ac51c4f9b1ba917967cd9a56e829068fdac5741196264a6a14d Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.929308 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zbmzp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-2klm7_openstack-operators(eb7ea012-63e3-4108-bb3b-904fd21a7c4c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 17:05:48 crc kubenswrapper[4835]: E0202 17:05:48.930658 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" podUID="eb7ea012-63e3-4108-bb3b-904fd21a7c4c" Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.016957 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" event={"ID":"eb7ea012-63e3-4108-bb3b-904fd21a7c4c","Type":"ContainerStarted","Data":"a6ee21a6f1980ac51c4f9b1ba917967cd9a56e829068fdac5741196264a6a14d"} Feb 02 17:05:49 crc kubenswrapper[4835]: E0202 17:05:49.017935 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" podUID="eb7ea012-63e3-4108-bb3b-904fd21a7c4c" Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.018218 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" event={"ID":"8d738981-de82-4d01-a295-b14401942841","Type":"ContainerStarted","Data":"f6bd7d899dac01c9d81047fa1fb3d63879f6ea0d5d36088e586edfe05b874d9e"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.021767 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" event={"ID":"af6dd5c2-faa4-407d-b6bc-fffda146240b","Type":"ContainerStarted","Data":"42535447694e5e6419a5ae09ab8b54f1254e93a5d977806aff17f1333540fe23"} Feb 02 17:05:49 crc kubenswrapper[4835]: E0202 17:05:49.022978 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" podUID="af6dd5c2-faa4-407d-b6bc-fffda146240b" Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.023562 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" 
event={"ID":"ac2c47dc-967c-456e-affc-bb3c4ac5b6d0","Type":"ContainerStarted","Data":"245c396b4b7c3dcb653f638408ecf69a29597926ec6719856eec51ca3a79b095"} Feb 02 17:05:49 crc kubenswrapper[4835]: E0202 17:05:49.025311 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/nova-operator@sha256:6b951a651861f6e805ceec19cad5a35a8dfe6fd9536acebd3c197ca4659d8a51\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" podUID="ac2c47dc-967c-456e-affc-bb3c4ac5b6d0" Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.026584 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" event={"ID":"563720e1-311a-4aea-b34b-e6ab1d5d7f44","Type":"ContainerStarted","Data":"f39397a2a3250c6403bc49b12fc59f2523864eedadc5e255ebd9ad03a2d150c7"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.028129 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" event={"ID":"668fc23c-0c08-4f7e-839d-6fbcf5f6554d","Type":"ContainerStarted","Data":"6d80306a923da8a4aac6690d3f40ba48cbc075d60fd9f78e7e83adfd23d08e89"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.029548 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" event={"ID":"83fe7277-43df-4e53-b2e1-20ec1c340289","Type":"ContainerStarted","Data":"0beacdfe4b7b0f58a8086ee6b9ad62c6edd7f75907a9f0ca7e950243e02d6a77"} Feb 02 17:05:49 crc kubenswrapper[4835]: E0202 17:05:49.038470 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/watcher-operator@sha256:3fd1f7623a4b32505f51f329116f7e13bb4cfd320e920961a5b86441a89326d6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" podUID="83fe7277-43df-4e53-b2e1-20ec1c340289" Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.040698 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" event={"ID":"affef4c6-1369-40e7-882d-e0cc06c7a492","Type":"ContainerStarted","Data":"62fff7138259b8ffa5c15a13c2dc332b95483943d062a0fdd5e8e4aaa9e7f9d4"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.043303 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" event={"ID":"60282c99-48f4-4c72-92d2-c92b6720bcf7","Type":"ContainerStarted","Data":"fbdf3dceef6875f01fefb192c31a5815aeb1336a271816044cf5db8db0f0446b"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.044216 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" event={"ID":"c64313f5-c2dc-4a80-aee6-4c177172598f","Type":"ContainerStarted","Data":"1ecb014e9efa3b67cc15cd41a3dc24c622a4878938018a89ab66bdeb930e931c"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.045332 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" event={"ID":"487c0b98-8b52-47fd-84ff-6637b6d79c8c","Type":"ContainerStarted","Data":"c681c84c59fac5178b999d070803f5cdb72922a187612af679c32148266ce0e1"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 
17:05:49.046519 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" event={"ID":"3b504454-3ebc-45b8-8e93-fcab1363ce3c","Type":"ContainerStarted","Data":"e0aa4cbfd34697bdd422f9d6128410f56823342741c90adcc3c09359c66d3814"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.047306 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" event={"ID":"9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2","Type":"ContainerStarted","Data":"8c6cc0652081ef3e97aca7c2a2dd79669148a4df9b3556dd8172afeb391354a0"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.050460 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" event={"ID":"a2b75f19-bcbe-4f09-9652-70f042d4bc29","Type":"ContainerStarted","Data":"68e8f0d071245a9030433213bd2c3190ab5607c895f5922efe086f6cc0ce421f"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.051895 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" event={"ID":"bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf","Type":"ContainerStarted","Data":"2ba0dbbff5f201c5bb1f012f6afe0bc8e331bb85841ea41cd34b62b9767dae2b"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.056868 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" event={"ID":"051510bb-9754-4866-932d-53e8f209af3e","Type":"ContainerStarted","Data":"97ce8c589759873bbe19a6e2367da3a911489cd876117a69a9f0e2df8543d5af"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.058124 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" event={"ID":"dc9b2536-2284-4bd8-b803-e6dc90e30016","Type":"ContainerStarted","Data":"516f26c2e41c4b178aa2af078de0b23d995cff7e435d48037d400da6968d0bc8"} Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.110349 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:49 crc kubenswrapper[4835]: I0202 17:05:49.110413 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:49 crc kubenswrapper[4835]: E0202 17:05:49.111072 4835 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 17:05:49 crc kubenswrapper[4835]: E0202 17:05:49.111415 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:51.111375431 +0000 UTC m=+942.732979511 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "metrics-server-cert" not found Feb 02 17:05:49 crc kubenswrapper[4835]: E0202 17:05:49.111747 4835 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 17:05:49 crc kubenswrapper[4835]: E0202 17:05:49.111793 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:51.111778543 +0000 UTC m=+942.733382623 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "webhook-server-cert" not found Feb 02 17:05:50 crc kubenswrapper[4835]: E0202 17:05:50.072084 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/watcher-operator@sha256:3fd1f7623a4b32505f51f329116f7e13bb4cfd320e920961a5b86441a89326d6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" podUID="83fe7277-43df-4e53-b2e1-20ec1c340289" Feb 02 17:05:50 crc kubenswrapper[4835]: E0202 17:05:50.072245 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" podUID="af6dd5c2-faa4-407d-b6bc-fffda146240b" Feb 02 17:05:50 crc kubenswrapper[4835]: E0202 17:05:50.072341 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/nova-operator@sha256:6b951a651861f6e805ceec19cad5a35a8dfe6fd9536acebd3c197ca4659d8a51\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" podUID="ac2c47dc-967c-456e-affc-bb3c4ac5b6d0" Feb 02 17:05:50 crc kubenswrapper[4835]: E0202 17:05:50.072388 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" podUID="eb7ea012-63e3-4108-bb3b-904fd21a7c4c" Feb 02 17:05:50 crc kubenswrapper[4835]: I0202 17:05:50.530673 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:50 crc kubenswrapper[4835]: E0202 17:05:50.530921 4835 secret.go:188] Couldn't get secret 
openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:50 crc kubenswrapper[4835]: E0202 17:05:50.530980 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert podName:cb34a8e8-0047-450d-898b-56164cd6f8c3 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:54.530960487 +0000 UTC m=+946.152564567 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert") pod "infra-operator-controller-manager-79955696d6-xx8fb" (UID: "cb34a8e8-0047-450d-898b-56164cd6f8c3") : secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:50 crc kubenswrapper[4835]: I0202 17:05:50.935425 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:50 crc kubenswrapper[4835]: E0202 17:05:50.935668 4835 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:50 crc kubenswrapper[4835]: E0202 17:05:50.935718 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert podName:eee7ce8b-cbaf-48ff-80d8-92011b4a11fa nodeName:}" failed. No retries permitted until 2026-02-02 17:05:54.935703951 +0000 UTC m=+946.557308031 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" (UID: "eee7ce8b-cbaf-48ff-80d8-92011b4a11fa") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:51 crc kubenswrapper[4835]: I0202 17:05:51.146930 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:51 crc kubenswrapper[4835]: I0202 17:05:51.146980 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:51 crc kubenswrapper[4835]: E0202 17:05:51.147094 4835 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 17:05:51 crc kubenswrapper[4835]: E0202 17:05:51.147138 4835 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 17:05:51 crc kubenswrapper[4835]: E0202 17:05:51.147148 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:55.147132259 +0000 UTC m=+946.768736339 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "metrics-server-cert" not found Feb 02 17:05:51 crc kubenswrapper[4835]: E0202 17:05:51.147229 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:05:55.147210962 +0000 UTC m=+946.768815072 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "webhook-server-cert" not found Feb 02 17:05:54 crc kubenswrapper[4835]: I0202 17:05:54.621060 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:05:54 crc kubenswrapper[4835]: E0202 17:05:54.621192 4835 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:54 crc kubenswrapper[4835]: E0202 17:05:54.621507 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert podName:cb34a8e8-0047-450d-898b-56164cd6f8c3 nodeName:}" failed. No retries permitted until 2026-02-02 17:06:02.621488703 +0000 UTC m=+954.243092783 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert") pod "infra-operator-controller-manager-79955696d6-xx8fb" (UID: "cb34a8e8-0047-450d-898b-56164cd6f8c3") : secret "infra-operator-webhook-server-cert" not found Feb 02 17:05:55 crc kubenswrapper[4835]: I0202 17:05:55.026807 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:05:55 crc kubenswrapper[4835]: E0202 17:05:55.026960 4835 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:55 crc kubenswrapper[4835]: E0202 17:05:55.027030 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert podName:eee7ce8b-cbaf-48ff-80d8-92011b4a11fa nodeName:}" failed. No retries permitted until 2026-02-02 17:06:03.027013058 +0000 UTC m=+954.648617138 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" (UID: "eee7ce8b-cbaf-48ff-80d8-92011b4a11fa") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 17:05:55 crc kubenswrapper[4835]: I0202 17:05:55.230669 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:55 crc kubenswrapper[4835]: I0202 17:05:55.230747 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:05:55 crc kubenswrapper[4835]: E0202 17:05:55.231115 4835 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 17:05:55 crc kubenswrapper[4835]: E0202 17:05:55.231258 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:06:03.231225102 +0000 UTC m=+954.852829182 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "webhook-server-cert" not found Feb 02 17:05:55 crc kubenswrapper[4835]: E0202 17:05:55.231789 4835 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 17:05:55 crc kubenswrapper[4835]: E0202 17:05:55.231870 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:06:03.2318553 +0000 UTC m=+954.853459570 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "metrics-server-cert" not found Feb 02 17:06:00 crc kubenswrapper[4835]: E0202 17:06:00.585453 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/barbican-operator@sha256:840e391b9a51241176705a421996a17a1433878433ce8720d4ed1a4b69319ccd" Feb 02 17:06:00 crc kubenswrapper[4835]: E0202 17:06:00.585919 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/barbican-operator@sha256:840e391b9a51241176705a421996a17a1433878433ce8720d4ed1a4b69319ccd,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7zxhr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-fc589b45f-wlvlw_openstack-operators(ccf3b51e-9298-4a5e-ad19-feac0a171056): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 17:06:00 crc kubenswrapper[4835]: E0202 17:06:00.587160 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" 
podUID="ccf3b51e-9298-4a5e-ad19-feac0a171056" Feb 02 17:06:01 crc kubenswrapper[4835]: E0202 17:06:01.145123 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/barbican-operator@sha256:840e391b9a51241176705a421996a17a1433878433ce8720d4ed1a4b69319ccd\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" podUID="ccf3b51e-9298-4a5e-ad19-feac0a171056" Feb 02 17:06:02 crc kubenswrapper[4835]: I0202 17:06:02.661728 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:06:02 crc kubenswrapper[4835]: I0202 17:06:02.685432 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb34a8e8-0047-450d-898b-56164cd6f8c3-cert\") pod \"infra-operator-controller-manager-79955696d6-xx8fb\" (UID: \"cb34a8e8-0047-450d-898b-56164cd6f8c3\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:06:02 crc kubenswrapper[4835]: I0202 17:06:02.737021 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:06:03 crc kubenswrapper[4835]: I0202 17:06:03.065823 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:06:03 crc kubenswrapper[4835]: I0202 17:06:03.072136 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee7ce8b-cbaf-48ff-80d8-92011b4a11fa-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq\" (UID: \"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:06:03 crc kubenswrapper[4835]: I0202 17:06:03.111983 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:06:03 crc kubenswrapper[4835]: I0202 17:06:03.268235 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:03 crc kubenswrapper[4835]: I0202 17:06:03.268385 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.268497 4835 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.268543 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:06:19.268528722 +0000 UTC m=+970.890132802 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "webhook-server-cert" not found Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.268827 4835 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.268859 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs podName:5166e3f9-91d3-4a6a-a4af-68e5063aa217 nodeName:}" failed. No retries permitted until 2026-02-02 17:06:19.268852191 +0000 UTC m=+970.890456271 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs") pod "openstack-operator-controller-manager-7777b795b7-9mpk4" (UID: "5166e3f9-91d3-4a6a-a4af-68e5063aa217") : secret "metrics-server-cert" not found Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.539652 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/keystone-operator@sha256:f6042794464b8ad49246666befd3943cb3ca212334333c0f6fe7a56ff3f6c73f" Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.539933 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/keystone-operator@sha256:f6042794464b8ad49246666befd3943cb3ca212334333c0f6fe7a56ff3f6c73f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-86mt5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-64469b487f-xdk9w_openstack-operators(487c0b98-8b52-47fd-84ff-6637b6d79c8c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.541932 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" 
podUID="487c0b98-8b52-47fd-84ff-6637b6d79c8c" Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.962603 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.962787 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nlktq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-wdspf_openstack-operators(dc9b2536-2284-4bd8-b803-e6dc90e30016): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 17:06:03 crc kubenswrapper[4835]: E0202 17:06:03.964086 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" podUID="dc9b2536-2284-4bd8-b803-e6dc90e30016" Feb 02 17:06:04 crc kubenswrapper[4835]: E0202 17:06:04.160863 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/keystone-operator@sha256:f6042794464b8ad49246666befd3943cb3ca212334333c0f6fe7a56ff3f6c73f\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" podUID="487c0b98-8b52-47fd-84ff-6637b6d79c8c" Feb 02 17:06:04 crc kubenswrapper[4835]: E0202 17:06:04.160867 4835 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" podUID="dc9b2536-2284-4bd8-b803-e6dc90e30016" Feb 02 17:06:04 crc kubenswrapper[4835]: I0202 17:06:04.930910 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq"] Feb 02 17:06:04 crc kubenswrapper[4835]: W0202 17:06:04.941578 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeee7ce8b_cbaf_48ff_80d8_92011b4a11fa.slice/crio-1b162168a40faf4fdbdd583e73c6696bb98c28ac0e4eb4e96e1c9d9d6976699e WatchSource:0}: Error finding container 1b162168a40faf4fdbdd583e73c6696bb98c28ac0e4eb4e96e1c9d9d6976699e: Status 404 returned error can't find the container with id 1b162168a40faf4fdbdd583e73c6696bb98c28ac0e4eb4e96e1c9d9d6976699e Feb 02 17:06:04 crc kubenswrapper[4835]: I0202 17:06:04.986261 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb"] Feb 02 17:06:05 crc kubenswrapper[4835]: W0202 17:06:05.026437 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb34a8e8_0047_450d_898b_56164cd6f8c3.slice/crio-8eed2377e4590db0530f30ca94dfa486a47ab64fe41a3fc38129ada998839006 WatchSource:0}: Error finding container 8eed2377e4590db0530f30ca94dfa486a47ab64fe41a3fc38129ada998839006: Status 404 returned error can't find the container with id 8eed2377e4590db0530f30ca94dfa486a47ab64fe41a3fc38129ada998839006 Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.172376 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" event={"ID":"2fa52615-07a4-47bc-8a7c-62565638964e","Type":"ContainerStarted","Data":"c6d599f30113237457f2c0f9d0624bc17ef7142bc29c423ec28397bf5f3d2a84"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.172451 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.174814 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" event={"ID":"051510bb-9754-4866-932d-53e8f209af3e","Type":"ContainerStarted","Data":"abfc307ecea2fb49a0755ea26c01386bc6303b7005ca40089a0043c22377bb92"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.174941 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.178470 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" event={"ID":"affef4c6-1369-40e7-882d-e0cc06c7a492","Type":"ContainerStarted","Data":"8897d6699b0aff30b22d41a8740da915a4e6d91371a13a88419b3e22dcf0d742"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.178604 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" Feb 02 17:06:05 
crc kubenswrapper[4835]: I0202 17:06:05.232755 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.232792 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" event={"ID":"563720e1-311a-4aea-b34b-e6ab1d5d7f44","Type":"ContainerStarted","Data":"47eab6d0fe061ad9d171780d9a09fefabebea87a054c988e6bd8ed26717c6f15"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.242381 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" podStartSLOduration=3.544385645 podStartE2EDuration="19.242357996s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:47.816943589 +0000 UTC m=+939.438547669" lastFinishedPulling="2026-02-02 17:06:03.51491594 +0000 UTC m=+955.136520020" observedRunningTime="2026-02-02 17:06:05.231722525 +0000 UTC m=+956.853326605" watchObservedRunningTime="2026-02-02 17:06:05.242357996 +0000 UTC m=+956.863962086" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.242846 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" event={"ID":"cb34a8e8-0047-450d-898b-56164cd6f8c3","Type":"ContainerStarted","Data":"8eed2377e4590db0530f30ca94dfa486a47ab64fe41a3fc38129ada998839006"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.268708 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" event={"ID":"867a5e63-f2c8-45fe-a65a-a8c3d11de2b3","Type":"ContainerStarted","Data":"5355526ee6ac827781caa682d7410350e95f65da765d45555cf7984d82dc0ced"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.269355 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.277123 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" podStartSLOduration=3.971697297 podStartE2EDuration="19.2771047s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.210509976 +0000 UTC m=+939.832114056" lastFinishedPulling="2026-02-02 17:06:03.515917379 +0000 UTC m=+955.137521459" observedRunningTime="2026-02-02 17:06:05.275571197 +0000 UTC m=+956.897175287" watchObservedRunningTime="2026-02-02 17:06:05.2771047 +0000 UTC m=+956.898708780" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.277636 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" event={"ID":"9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2","Type":"ContainerStarted","Data":"fbcd16782f02ca2757e9bf658eb046a73f6678b4bbb9d4f2ce432bf8f2706ab2"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.278373 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.297138 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" 
event={"ID":"bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf","Type":"ContainerStarted","Data":"05cccf2eebb67cfb7ab7d450317968f8653d6c6fd70d62ba9f4b78ca9f8d73e2"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.297887 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.310485 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" event={"ID":"8d738981-de82-4d01-a295-b14401942841","Type":"ContainerStarted","Data":"ec9b4f70cbfb2b3f997e875d093641dcd5e3946659c61b7602cdbf1745ec8e7c"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.311109 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.335717 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" event={"ID":"3b504454-3ebc-45b8-8e93-fcab1363ce3c","Type":"ContainerStarted","Data":"5a788217ea0beaefef6b9a9b8a5618babf5d6328922c4f6085e5e22f604d8a0a"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.336506 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.345205 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" event={"ID":"60282c99-48f4-4c72-92d2-c92b6720bcf7","Type":"ContainerStarted","Data":"e3c9a3dffc708400f417e053e98ebec6e00bf46b48932e202104d8716b3caecf"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.345984 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.350228 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" podStartSLOduration=3.646779645 podStartE2EDuration="19.35020661s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.72790335 +0000 UTC m=+940.349507430" lastFinishedPulling="2026-02-02 17:06:04.431330315 +0000 UTC m=+956.052934395" observedRunningTime="2026-02-02 17:06:05.346654 +0000 UTC m=+956.968258090" watchObservedRunningTime="2026-02-02 17:06:05.35020661 +0000 UTC m=+956.971810690" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.370874 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" event={"ID":"c64313f5-c2dc-4a80-aee6-4c177172598f","Type":"ContainerStarted","Data":"33344b48ebb05f9fa1799bc37a52a44a20493a0177f3b61bbf9e574d9c6cdb3e"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.371657 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.384608 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" 
event={"ID":"668fc23c-0c08-4f7e-839d-6fbcf5f6554d","Type":"ContainerStarted","Data":"62e5ad8afedd63d9a9468aad0d9c023864e704a2cd9f6d53e65c64a490cbf23f"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.385543 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.400583 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" event={"ID":"a2b75f19-bcbe-4f09-9652-70f042d4bc29","Type":"ContainerStarted","Data":"147b48e51b74c7e1b16c69fd2c4c65ac8e10e261855c5525bd0ddd21856ec1fa"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.401368 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.421814 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" event={"ID":"ac2c47dc-967c-456e-affc-bb3c4ac5b6d0","Type":"ContainerStarted","Data":"bea042d1f48cef9dc3add6bc2a8ce34d89edd46fbf37b59783682f90620e62f9"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.422882 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.434927 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" event={"ID":"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa","Type":"ContainerStarted","Data":"1b162168a40faf4fdbdd583e73c6696bb98c28ac0e4eb4e96e1c9d9d6976699e"} Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.447646 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" podStartSLOduration=2.622366574 podStartE2EDuration="18.44762113s" podCreationTimestamp="2026-02-02 17:05:47 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.605960896 +0000 UTC m=+940.227564976" lastFinishedPulling="2026-02-02 17:06:04.431215452 +0000 UTC m=+956.052819532" observedRunningTime="2026-02-02 17:06:05.387720613 +0000 UTC m=+957.009324703" watchObservedRunningTime="2026-02-02 17:06:05.44762113 +0000 UTC m=+957.069225210" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.452013 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" podStartSLOduration=3.755479103 podStartE2EDuration="19.451990633s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:47.81838458 +0000 UTC m=+939.439988660" lastFinishedPulling="2026-02-02 17:06:03.51489611 +0000 UTC m=+955.136500190" observedRunningTime="2026-02-02 17:06:05.443722969 +0000 UTC m=+957.065327059" watchObservedRunningTime="2026-02-02 17:06:05.451990633 +0000 UTC m=+957.073594713" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.563192 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" podStartSLOduration=4.323024548 podStartE2EDuration="19.563172262s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.706303718 +0000 UTC 
m=+940.327907808" lastFinishedPulling="2026-02-02 17:06:03.946451442 +0000 UTC m=+955.568055522" observedRunningTime="2026-02-02 17:06:05.53554054 +0000 UTC m=+957.157144630" watchObservedRunningTime="2026-02-02 17:06:05.563172262 +0000 UTC m=+957.184776342" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.630823 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" podStartSLOduration=4.399067303 podStartE2EDuration="19.630803668s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.714769748 +0000 UTC m=+940.336373828" lastFinishedPulling="2026-02-02 17:06:03.946506113 +0000 UTC m=+955.568110193" observedRunningTime="2026-02-02 17:06:05.63018079 +0000 UTC m=+957.251784880" watchObservedRunningTime="2026-02-02 17:06:05.630803668 +0000 UTC m=+957.252407758" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.632465 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" podStartSLOduration=3.374283797 podStartE2EDuration="19.632450625s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.173175038 +0000 UTC m=+939.794779118" lastFinishedPulling="2026-02-02 17:06:04.431341856 +0000 UTC m=+956.052945946" observedRunningTime="2026-02-02 17:06:05.606607153 +0000 UTC m=+957.228211233" watchObservedRunningTime="2026-02-02 17:06:05.632450625 +0000 UTC m=+957.254054705" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.721758 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" podStartSLOduration=4.378671594 podStartE2EDuration="19.721737313s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.603413424 +0000 UTC m=+940.225017504" lastFinishedPulling="2026-02-02 17:06:03.946479143 +0000 UTC m=+955.568083223" observedRunningTime="2026-02-02 17:06:05.715149267 +0000 UTC m=+957.336753357" watchObservedRunningTime="2026-02-02 17:06:05.721737313 +0000 UTC m=+957.343341393" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.766231 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" podStartSLOduration=4.5480158809999995 podStartE2EDuration="19.766209153s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.72825653 +0000 UTC m=+940.349860610" lastFinishedPulling="2026-02-02 17:06:03.946449802 +0000 UTC m=+955.568053882" observedRunningTime="2026-02-02 17:06:05.752486234 +0000 UTC m=+957.374090324" watchObservedRunningTime="2026-02-02 17:06:05.766209153 +0000 UTC m=+957.387813233" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.802103 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" podStartSLOduration=4.085746709 podStartE2EDuration="19.802081539s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.714968594 +0000 UTC m=+940.336572674" lastFinishedPulling="2026-02-02 17:06:04.431303424 +0000 UTC m=+956.052907504" observedRunningTime="2026-02-02 17:06:05.801261046 +0000 UTC m=+957.422865126" watchObservedRunningTime="2026-02-02 17:06:05.802081539 +0000 UTC m=+957.423685619" Feb 02 17:06:05 
crc kubenswrapper[4835]: I0202 17:06:05.842023 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" podStartSLOduration=4.610632735 podStartE2EDuration="19.84201274s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.71519642 +0000 UTC m=+940.336800500" lastFinishedPulling="2026-02-02 17:06:03.946576425 +0000 UTC m=+955.568180505" observedRunningTime="2026-02-02 17:06:05.839980822 +0000 UTC m=+957.461584902" watchObservedRunningTime="2026-02-02 17:06:05.84201274 +0000 UTC m=+957.463616820" Feb 02 17:06:05 crc kubenswrapper[4835]: I0202 17:06:05.874828 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" podStartSLOduration=4.121833141 podStartE2EDuration="19.874806889s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.714437739 +0000 UTC m=+940.336041819" lastFinishedPulling="2026-02-02 17:06:04.467411487 +0000 UTC m=+956.089015567" observedRunningTime="2026-02-02 17:06:05.871764563 +0000 UTC m=+957.493368653" watchObservedRunningTime="2026-02-02 17:06:05.874806889 +0000 UTC m=+957.496410969" Feb 02 17:06:09 crc kubenswrapper[4835]: I0202 17:06:09.213684 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" podStartSLOduration=7.457750113 podStartE2EDuration="23.213666195s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.762051077 +0000 UTC m=+940.383655157" lastFinishedPulling="2026-02-02 17:06:04.517967159 +0000 UTC m=+956.139571239" observedRunningTime="2026-02-02 17:06:05.907869765 +0000 UTC m=+957.529473865" watchObservedRunningTime="2026-02-02 17:06:09.213666195 +0000 UTC m=+960.835270275" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.480743 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" event={"ID":"eb7ea012-63e3-4108-bb3b-904fd21a7c4c","Type":"ContainerStarted","Data":"762ea80a9ece4c242608ee2be5c1be41d6c45d0242b7a4b371fe58d6700d347f"} Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.481440 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.482661 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" event={"ID":"eee7ce8b-cbaf-48ff-80d8-92011b4a11fa","Type":"ContainerStarted","Data":"e433158b2563d0b14998bdbcbcbd981af1fd6f5d4461c227961b5908381ac746"} Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.482774 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.484004 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" event={"ID":"af6dd5c2-faa4-407d-b6bc-fffda146240b","Type":"ContainerStarted","Data":"5677df8649c7046b560c2be129ece1fb4da1d2508370584ae033c27678d56322"} Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.484434 4835 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.486899 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" event={"ID":"cb34a8e8-0047-450d-898b-56164cd6f8c3","Type":"ContainerStarted","Data":"466e5b3941eb6e064b12c85354cfa0a191936717692f1a855af51824cf148c48"} Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.487293 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.488727 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" event={"ID":"83fe7277-43df-4e53-b2e1-20ec1c340289","Type":"ContainerStarted","Data":"cf49f21d401535b64ab6e193fa7241160fc84feb4d2ba2b6d807dc03522adb6f"} Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.489080 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.511316 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" podStartSLOduration=2.516960897 podStartE2EDuration="24.51129641s" podCreationTimestamp="2026-02-02 17:05:47 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.92916493 +0000 UTC m=+940.550769010" lastFinishedPulling="2026-02-02 17:06:10.923500443 +0000 UTC m=+962.545104523" observedRunningTime="2026-02-02 17:06:11.505940359 +0000 UTC m=+963.127544439" watchObservedRunningTime="2026-02-02 17:06:11.51129641 +0000 UTC m=+963.132900490" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.522642 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" podStartSLOduration=3.385166015 podStartE2EDuration="25.522627271s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.746529238 +0000 UTC m=+940.368133318" lastFinishedPulling="2026-02-02 17:06:10.883990494 +0000 UTC m=+962.505594574" observedRunningTime="2026-02-02 17:06:11.517486816 +0000 UTC m=+963.139090896" watchObservedRunningTime="2026-02-02 17:06:11.522627271 +0000 UTC m=+963.144231351" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.541965 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" podStartSLOduration=19.605197351 podStartE2EDuration="25.541948318s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:06:04.949073609 +0000 UTC m=+956.570677689" lastFinishedPulling="2026-02-02 17:06:10.885824576 +0000 UTC m=+962.507428656" observedRunningTime="2026-02-02 17:06:11.539227511 +0000 UTC m=+963.160831611" watchObservedRunningTime="2026-02-02 17:06:11.541948318 +0000 UTC m=+963.163552398" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.556054 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" podStartSLOduration=2.459315155 podStartE2EDuration="24.556031047s" podCreationTimestamp="2026-02-02 17:05:47 +0000 UTC" 
firstStartedPulling="2026-02-02 17:05:48.787588111 +0000 UTC m=+940.409192181" lastFinishedPulling="2026-02-02 17:06:10.884304003 +0000 UTC m=+962.505908073" observedRunningTime="2026-02-02 17:06:11.551106648 +0000 UTC m=+963.172710728" watchObservedRunningTime="2026-02-02 17:06:11.556031047 +0000 UTC m=+963.177635127" Feb 02 17:06:11 crc kubenswrapper[4835]: I0202 17:06:11.571311 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" podStartSLOduration=19.717829232 podStartE2EDuration="25.57129257s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:06:05.030621319 +0000 UTC m=+956.652225389" lastFinishedPulling="2026-02-02 17:06:10.884084647 +0000 UTC m=+962.505688727" observedRunningTime="2026-02-02 17:06:11.569436237 +0000 UTC m=+963.191040317" watchObservedRunningTime="2026-02-02 17:06:11.57129257 +0000 UTC m=+963.192896650" Feb 02 17:06:13 crc kubenswrapper[4835]: I0202 17:06:13.501684 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" event={"ID":"ccf3b51e-9298-4a5e-ad19-feac0a171056","Type":"ContainerStarted","Data":"b2d0cb667d9b0f60979e0f786b4fa0d88f910c16f3d801c2a00e463738e363bf"} Feb 02 17:06:13 crc kubenswrapper[4835]: I0202 17:06:13.502184 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" Feb 02 17:06:13 crc kubenswrapper[4835]: I0202 17:06:13.518551 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" podStartSLOduration=2.6168770759999997 podStartE2EDuration="27.518534812s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:47.707105388 +0000 UTC m=+939.328709468" lastFinishedPulling="2026-02-02 17:06:12.608763124 +0000 UTC m=+964.230367204" observedRunningTime="2026-02-02 17:06:13.514779075 +0000 UTC m=+965.136383175" watchObservedRunningTime="2026-02-02 17:06:13.518534812 +0000 UTC m=+965.140138892" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.003260 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-5968f45b79-bhwd9" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.013066 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-2smkw" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.070242 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-l2d9w" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.079113 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-sf4fj" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.128564 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-dpqkl" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.214310 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-87bd9d46f-5bvq9" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.236621 
4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-knb4g" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.306052 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-576995988b-pd7lc" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.346513 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-mt62w" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.375925 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5644b66645-89lbp" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.427261 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-vl7xb" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.495090 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-ccmdf" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.540149 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-lrq25" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.578378 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-kjlbl" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.640380 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-r2xwk" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.671954 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2klm7" Feb 02 17:06:17 crc kubenswrapper[4835]: I0202 17:06:17.690306 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-rrg8c" Feb 02 17:06:19 crc kubenswrapper[4835]: I0202 17:06:19.315256 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:19 crc kubenswrapper[4835]: I0202 17:06:19.316401 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:19 crc kubenswrapper[4835]: I0202 17:06:19.322433 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-metrics-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " 
pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:19 crc kubenswrapper[4835]: I0202 17:06:19.322497 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5166e3f9-91d3-4a6a-a4af-68e5063aa217-webhook-certs\") pod \"openstack-operator-controller-manager-7777b795b7-9mpk4\" (UID: \"5166e3f9-91d3-4a6a-a4af-68e5063aa217\") " pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:19 crc kubenswrapper[4835]: I0202 17:06:19.552041 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-7w54s" Feb 02 17:06:19 crc kubenswrapper[4835]: I0202 17:06:19.560366 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:19 crc kubenswrapper[4835]: I0202 17:06:19.989361 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4"] Feb 02 17:06:19 crc kubenswrapper[4835]: W0202 17:06:19.998829 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5166e3f9_91d3_4a6a_a4af_68e5063aa217.slice/crio-bf27f9e27ad4a1638ecd2d68a54e86487ec5db4dc3024b5b4a53257f97b7380f WatchSource:0}: Error finding container bf27f9e27ad4a1638ecd2d68a54e86487ec5db4dc3024b5b4a53257f97b7380f: Status 404 returned error can't find the container with id bf27f9e27ad4a1638ecd2d68a54e86487ec5db4dc3024b5b4a53257f97b7380f Feb 02 17:06:20 crc kubenswrapper[4835]: I0202 17:06:20.556560 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" event={"ID":"5166e3f9-91d3-4a6a-a4af-68e5063aa217","Type":"ContainerStarted","Data":"bf27f9e27ad4a1638ecd2d68a54e86487ec5db4dc3024b5b4a53257f97b7380f"} Feb 02 17:06:22 crc kubenswrapper[4835]: I0202 17:06:22.746504 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-xx8fb" Feb 02 17:06:23 crc kubenswrapper[4835]: I0202 17:06:23.118350 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq" Feb 02 17:06:26 crc kubenswrapper[4835]: I0202 17:06:26.602196 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" event={"ID":"5166e3f9-91d3-4a6a-a4af-68e5063aa217","Type":"ContainerStarted","Data":"ffb001b7de9597182efbe9766c0394d76dfae5660e1b29119ad29f3652fbd4b3"} Feb 02 17:06:26 crc kubenswrapper[4835]: I0202 17:06:26.602564 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:26 crc kubenswrapper[4835]: I0202 17:06:26.634595 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" podStartSLOduration=39.634576753 podStartE2EDuration="39.634576753s" podCreationTimestamp="2026-02-02 17:05:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:06:26.633974456 +0000 UTC 
m=+978.255578556" watchObservedRunningTime="2026-02-02 17:06:26.634576753 +0000 UTC m=+978.256180833" Feb 02 17:06:26 crc kubenswrapper[4835]: I0202 17:06:26.981785 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-wlvlw" Feb 02 17:06:27 crc kubenswrapper[4835]: I0202 17:06:27.610704 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" event={"ID":"487c0b98-8b52-47fd-84ff-6637b6d79c8c","Type":"ContainerStarted","Data":"16109d84b2ec3fbfc6dd273cd837f563674913f123adab08ea6858b60cc68d7b"} Feb 02 17:06:27 crc kubenswrapper[4835]: I0202 17:06:27.610973 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" Feb 02 17:06:27 crc kubenswrapper[4835]: I0202 17:06:27.612508 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" event={"ID":"dc9b2536-2284-4bd8-b803-e6dc90e30016","Type":"ContainerStarted","Data":"8fd6ff4c81e93356d8d2eefb0743d83780ca089374b2d2340518801bc795d604"} Feb 02 17:06:27 crc kubenswrapper[4835]: I0202 17:06:27.627029 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" podStartSLOduration=3.569116896 podStartE2EDuration="41.626979181s" podCreationTimestamp="2026-02-02 17:05:46 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.677548334 +0000 UTC m=+940.299152414" lastFinishedPulling="2026-02-02 17:06:26.735410619 +0000 UTC m=+978.357014699" observedRunningTime="2026-02-02 17:06:27.626042854 +0000 UTC m=+979.247646954" watchObservedRunningTime="2026-02-02 17:06:27.626979181 +0000 UTC m=+979.248583281" Feb 02 17:06:37 crc kubenswrapper[4835]: I0202 17:06:37.255511 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-xdk9w" Feb 02 17:06:37 crc kubenswrapper[4835]: I0202 17:06:37.281907 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wdspf" podStartSLOduration=12.467297859 podStartE2EDuration="50.281878464s" podCreationTimestamp="2026-02-02 17:05:47 +0000 UTC" firstStartedPulling="2026-02-02 17:05:48.919598989 +0000 UTC m=+940.541203069" lastFinishedPulling="2026-02-02 17:06:26.734179594 +0000 UTC m=+978.355783674" observedRunningTime="2026-02-02 17:06:27.645003511 +0000 UTC m=+979.266607601" watchObservedRunningTime="2026-02-02 17:06:37.281878464 +0000 UTC m=+988.903482584" Feb 02 17:06:39 crc kubenswrapper[4835]: I0202 17:06:39.567503 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7777b795b7-9mpk4" Feb 02 17:06:44 crc kubenswrapper[4835]: I0202 17:06:44.870209 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:06:44 crc kubenswrapper[4835]: I0202 17:06:44.870642 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" 
podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.034398 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xf47z"] Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.040183 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.043378 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-qsbmf" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.043609 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.043791 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.043980 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.053775 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xf47z"] Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.089446 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-k86qz"] Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.092651 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.096115 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.099018 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-k86qz"] Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.143376 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95780417-773e-4112-b118-9a15aa9944b7-config\") pod \"dnsmasq-dns-675f4bcbfc-xf47z\" (UID: \"95780417-773e-4112-b118-9a15aa9944b7\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.143446 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-955g8\" (UniqueName: \"kubernetes.io/projected/95780417-773e-4112-b118-9a15aa9944b7-kube-api-access-955g8\") pod \"dnsmasq-dns-675f4bcbfc-xf47z\" (UID: \"95780417-773e-4112-b118-9a15aa9944b7\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.245165 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-config\") pod \"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.245206 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrdvh\" (UniqueName: \"kubernetes.io/projected/3c49cfa8-49a8-45ff-8088-303906cc33a9-kube-api-access-rrdvh\") pod 
\"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.245268 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95780417-773e-4112-b118-9a15aa9944b7-config\") pod \"dnsmasq-dns-675f4bcbfc-xf47z\" (UID: \"95780417-773e-4112-b118-9a15aa9944b7\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.245328 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-955g8\" (UniqueName: \"kubernetes.io/projected/95780417-773e-4112-b118-9a15aa9944b7-kube-api-access-955g8\") pod \"dnsmasq-dns-675f4bcbfc-xf47z\" (UID: \"95780417-773e-4112-b118-9a15aa9944b7\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.245365 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.246034 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95780417-773e-4112-b118-9a15aa9944b7-config\") pod \"dnsmasq-dns-675f4bcbfc-xf47z\" (UID: \"95780417-773e-4112-b118-9a15aa9944b7\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.269146 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-955g8\" (UniqueName: \"kubernetes.io/projected/95780417-773e-4112-b118-9a15aa9944b7-kube-api-access-955g8\") pod \"dnsmasq-dns-675f4bcbfc-xf47z\" (UID: \"95780417-773e-4112-b118-9a15aa9944b7\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.347130 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.347196 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-config\") pod \"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.347222 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrdvh\" (UniqueName: \"kubernetes.io/projected/3c49cfa8-49a8-45ff-8088-303906cc33a9-kube-api-access-rrdvh\") pod \"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.348007 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " 
pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.348091 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-config\") pod \"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.361837 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.392074 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrdvh\" (UniqueName: \"kubernetes.io/projected/3c49cfa8-49a8-45ff-8088-303906cc33a9-kube-api-access-rrdvh\") pod \"dnsmasq-dns-78dd6ddcc-k86qz\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.408703 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.856168 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.857125 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xf47z"] Feb 02 17:06:56 crc kubenswrapper[4835]: W0202 17:06:56.938368 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c49cfa8_49a8_45ff_8088_303906cc33a9.slice/crio-5470bf979217cfb2dc3a61302eac24bdbb776cfaa40b08377a6764b08ae2c25b WatchSource:0}: Error finding container 5470bf979217cfb2dc3a61302eac24bdbb776cfaa40b08377a6764b08ae2c25b: Status 404 returned error can't find the container with id 5470bf979217cfb2dc3a61302eac24bdbb776cfaa40b08377a6764b08ae2c25b Feb 02 17:06:56 crc kubenswrapper[4835]: I0202 17:06:56.940420 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-k86qz"] Feb 02 17:06:57 crc kubenswrapper[4835]: I0202 17:06:57.835142 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" event={"ID":"3c49cfa8-49a8-45ff-8088-303906cc33a9","Type":"ContainerStarted","Data":"5470bf979217cfb2dc3a61302eac24bdbb776cfaa40b08377a6764b08ae2c25b"} Feb 02 17:06:57 crc kubenswrapper[4835]: I0202 17:06:57.836551 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" event={"ID":"95780417-773e-4112-b118-9a15aa9944b7","Type":"ContainerStarted","Data":"91c3b7e70d444b4f7e45a436bd73bf911dabfd8f7d348472c1a800f2e973737d"} Feb 02 17:06:58 crc kubenswrapper[4835]: I0202 17:06:58.874864 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xf47z"] Feb 02 17:06:58 crc kubenswrapper[4835]: I0202 17:06:58.893364 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b6c75"] Feb 02 17:06:58 crc kubenswrapper[4835]: I0202 17:06:58.894817 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:58 crc kubenswrapper[4835]: I0202 17:06:58.901072 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b6c75"] Feb 02 17:06:58 crc kubenswrapper[4835]: I0202 17:06:58.988502 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-config\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:58 crc kubenswrapper[4835]: I0202 17:06:58.988557 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7n9c\" (UniqueName: \"kubernetes.io/projected/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-kube-api-access-z7n9c\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:58 crc kubenswrapper[4835]: I0202 17:06:58.989540 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-dns-svc\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.091044 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-config\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.091087 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7n9c\" (UniqueName: \"kubernetes.io/projected/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-kube-api-access-z7n9c\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.091148 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-dns-svc\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.092028 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-dns-svc\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.093621 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-config\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.135440 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7n9c\" (UniqueName: 
\"kubernetes.io/projected/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-kube-api-access-z7n9c\") pod \"dnsmasq-dns-666b6646f7-b6c75\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.167987 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-k86qz"] Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.207818 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8pdms"] Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.210920 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.219126 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8pdms"] Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.220377 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.397447 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-config\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.397848 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtspr\" (UniqueName: \"kubernetes.io/projected/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-kube-api-access-xtspr\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.397907 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.502183 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtspr\" (UniqueName: \"kubernetes.io/projected/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-kube-api-access-xtspr\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.502293 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.502383 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-config\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.503236 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.503503 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-config\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.522106 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtspr\" (UniqueName: \"kubernetes.io/projected/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-kube-api-access-xtspr\") pod \"dnsmasq-dns-57d769cc4f-8pdms\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.548386 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.725853 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b6c75"] Feb 02 17:06:59 crc kubenswrapper[4835]: W0202 17:06:59.736403 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18c2aaa4_d81b_4b79_a14c_2ac5041aa544.slice/crio-c60a431017f3e708df9ae087e47757cdb5bc752b5dea5a7ac760c886d7fe6441 WatchSource:0}: Error finding container c60a431017f3e708df9ae087e47757cdb5bc752b5dea5a7ac760c886d7fe6441: Status 404 returned error can't find the container with id c60a431017f3e708df9ae087e47757cdb5bc752b5dea5a7ac760c886d7fe6441 Feb 02 17:06:59 crc kubenswrapper[4835]: I0202 17:06:59.898318 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" event={"ID":"18c2aaa4-d81b-4b79-a14c-2ac5041aa544","Type":"ContainerStarted","Data":"c60a431017f3e708df9ae087e47757cdb5bc752b5dea5a7ac760c886d7fe6441"} Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.021302 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8pdms"] Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.035122 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.037346 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.043476 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jm6m2" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.044977 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.045198 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.045378 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.045518 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.045653 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.045796 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.050372 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212526 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5cb4e8f7-3881-4fef-9056-0e2f149aab21-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212701 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212754 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bnzz\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-kube-api-access-7bnzz\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212794 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212816 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212845 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/5cb4e8f7-3881-4fef-9056-0e2f149aab21-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212873 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212898 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212935 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.212980 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-config-data\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.213002 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314396 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314456 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bnzz\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-kube-api-access-7bnzz\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314484 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314501 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " 
pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314525 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5cb4e8f7-3881-4fef-9056-0e2f149aab21-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314561 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314580 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314606 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314628 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-config-data\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314643 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.314658 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5cb4e8f7-3881-4fef-9056-0e2f149aab21-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.315062 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.316245 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.316680 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.317082 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.318432 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.319087 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-config-data\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.319412 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.319537 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.319715 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.319767 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.319875 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mjbch" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.319921 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.320144 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.320656 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.324154 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.327629 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.329519 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.330412 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5cb4e8f7-3881-4fef-9056-0e2f149aab21-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.338928 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bnzz\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-kube-api-access-7bnzz\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.347409 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.349990 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5cb4e8f7-3881-4fef-9056-0e2f149aab21-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.361322 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.361910 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.415669 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slbzw\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-kube-api-access-slbzw\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.415730 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.415748 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.415959 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.416007 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.416049 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.416074 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.416095 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.416182 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-plugins-conf\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.416242 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.416383 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517397 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517435 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517458 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517473 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517490 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517528 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517555 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: 
I0202 17:07:00.517588 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517617 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slbzw\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-kube-api-access-slbzw\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517644 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.517659 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.518065 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.518615 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.518713 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.519605 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.520763 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.521091 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.521817 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.522971 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.523735 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.532108 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.536185 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slbzw\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-kube-api-access-slbzw\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.545050 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:00 crc kubenswrapper[4835]: I0202 17:07:00.712815 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.500598 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.502534 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.505640 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.505904 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.507126 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-px2th" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.515324 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.518417 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.526982 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.632222 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-operator-scripts\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.632283 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec68dd7-cf6a-45a4-a036-19bcf050c892-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.632334 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-config-data-default\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.632356 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/eec68dd7-cf6a-45a4-a036-19bcf050c892-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.632556 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/eec68dd7-cf6a-45a4-a036-19bcf050c892-config-data-generated\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.632591 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.632638 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-kolla-config\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.632681 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sblp6\" (UniqueName: \"kubernetes.io/projected/eec68dd7-cf6a-45a4-a036-19bcf050c892-kube-api-access-sblp6\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.733705 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-operator-scripts\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.733774 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec68dd7-cf6a-45a4-a036-19bcf050c892-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.733840 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-config-data-default\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.733862 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/eec68dd7-cf6a-45a4-a036-19bcf050c892-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.733894 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/eec68dd7-cf6a-45a4-a036-19bcf050c892-config-data-generated\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.733929 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.734389 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.734493 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/eec68dd7-cf6a-45a4-a036-19bcf050c892-config-data-generated\") pod \"openstack-galera-0\" (UID: 
\"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.734982 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-config-data-default\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.735065 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-kolla-config\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.735563 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-operator-scripts\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.735670 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eec68dd7-cf6a-45a4-a036-19bcf050c892-kolla-config\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.735744 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sblp6\" (UniqueName: \"kubernetes.io/projected/eec68dd7-cf6a-45a4-a036-19bcf050c892-kube-api-access-sblp6\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.738710 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/eec68dd7-cf6a-45a4-a036-19bcf050c892-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.739050 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec68dd7-cf6a-45a4-a036-19bcf050c892-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.757039 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.760497 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sblp6\" (UniqueName: \"kubernetes.io/projected/eec68dd7-cf6a-45a4-a036-19bcf050c892-kube-api-access-sblp6\") pod \"openstack-galera-0\" (UID: \"eec68dd7-cf6a-45a4-a036-19bcf050c892\") " pod="openstack/openstack-galera-0" Feb 02 17:07:01 crc kubenswrapper[4835]: I0202 17:07:01.840082 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 02 17:07:02 crc kubenswrapper[4835]: W0202 17:07:02.674066 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cb969a5_3a5e_4bb5_8499_87d9fe1fb148.slice/crio-e774724d2b4c9a904d5c7b19ba48c339ab15d29780c03dc47366ed434a9a65f5 WatchSource:0}: Error finding container e774724d2b4c9a904d5c7b19ba48c339ab15d29780c03dc47366ed434a9a65f5: Status 404 returned error can't find the container with id e774724d2b4c9a904d5c7b19ba48c339ab15d29780c03dc47366ed434a9a65f5 Feb 02 17:07:02 crc kubenswrapper[4835]: I0202 17:07:02.918142 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 17:07:02 crc kubenswrapper[4835]: I0202 17:07:02.919744 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:02 crc kubenswrapper[4835]: I0202 17:07:02.922746 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 02 17:07:02 crc kubenswrapper[4835]: I0202 17:07:02.922951 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 02 17:07:02 crc kubenswrapper[4835]: I0202 17:07:02.922980 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 02 17:07:02 crc kubenswrapper[4835]: I0202 17:07:02.923097 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-rjkcb" Feb 02 17:07:02 crc kubenswrapper[4835]: I0202 17:07:02.925043 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 17:07:02 crc kubenswrapper[4835]: I0202 17:07:02.926714 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" event={"ID":"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148","Type":"ContainerStarted","Data":"e774724d2b4c9a904d5c7b19ba48c339ab15d29780c03dc47366ed434a9a65f5"} Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.053964 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.054014 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b630cc52-70e1-4580-8d73-df2507194554-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.054053 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jswf5\" (UniqueName: \"kubernetes.io/projected/b630cc52-70e1-4580-8d73-df2507194554-kube-api-access-jswf5\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.054091 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b630cc52-70e1-4580-8d73-df2507194554-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.054122 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.054169 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.054246 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b630cc52-70e1-4580-8d73-df2507194554-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.054318 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156150 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b630cc52-70e1-4580-8d73-df2507194554-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156208 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156236 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156260 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b630cc52-70e1-4580-8d73-df2507194554-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156291 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jswf5\" (UniqueName: 
\"kubernetes.io/projected/b630cc52-70e1-4580-8d73-df2507194554-kube-api-access-jswf5\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156315 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b630cc52-70e1-4580-8d73-df2507194554-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156338 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156367 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156641 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.156711 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b630cc52-70e1-4580-8d73-df2507194554-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.157816 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.157828 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.159000 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b630cc52-70e1-4580-8d73-df2507194554-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.164816 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b630cc52-70e1-4580-8d73-df2507194554-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: 
\"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.164882 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b630cc52-70e1-4580-8d73-df2507194554-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.175457 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jswf5\" (UniqueName: \"kubernetes.io/projected/b630cc52-70e1-4580-8d73-df2507194554-kube-api-access-jswf5\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.194773 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b630cc52-70e1-4580-8d73-df2507194554\") " pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.240485 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.241771 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.246468 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.247386 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-cnxrl" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.247538 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.248373 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.262389 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.359127 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.359309 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-kolla-config\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.359540 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.359613 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdkh2\" (UniqueName: \"kubernetes.io/projected/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-kube-api-access-fdkh2\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.359771 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-config-data\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.460800 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.460852 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdkh2\" (UniqueName: \"kubernetes.io/projected/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-kube-api-access-fdkh2\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.460892 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-config-data\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.460920 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 
crc kubenswrapper[4835]: I0202 17:07:03.460956 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-kolla-config\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.461896 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-kolla-config\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.462027 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-config-data\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.465100 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.472972 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.479518 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdkh2\" (UniqueName: \"kubernetes.io/projected/a5b7fac0-4bb3-4138-9618-96bf25cbdde5-kube-api-access-fdkh2\") pod \"memcached-0\" (UID: \"a5b7fac0-4bb3-4138-9618-96bf25cbdde5\") " pod="openstack/memcached-0" Feb 02 17:07:03 crc kubenswrapper[4835]: I0202 17:07:03.574698 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 02 17:07:04 crc kubenswrapper[4835]: I0202 17:07:04.953990 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:07:04 crc kubenswrapper[4835]: I0202 17:07:04.955238 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 17:07:04 crc kubenswrapper[4835]: I0202 17:07:04.959384 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-zrdtx" Feb 02 17:07:04 crc kubenswrapper[4835]: I0202 17:07:04.975233 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:07:05 crc kubenswrapper[4835]: I0202 17:07:05.085196 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4swpg\" (UniqueName: \"kubernetes.io/projected/f808d0ef-5504-4d6c-9551-28b94cb89838-kube-api-access-4swpg\") pod \"kube-state-metrics-0\" (UID: \"f808d0ef-5504-4d6c-9551-28b94cb89838\") " pod="openstack/kube-state-metrics-0" Feb 02 17:07:05 crc kubenswrapper[4835]: I0202 17:07:05.187135 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4swpg\" (UniqueName: \"kubernetes.io/projected/f808d0ef-5504-4d6c-9551-28b94cb89838-kube-api-access-4swpg\") pod \"kube-state-metrics-0\" (UID: \"f808d0ef-5504-4d6c-9551-28b94cb89838\") " pod="openstack/kube-state-metrics-0" Feb 02 17:07:05 crc kubenswrapper[4835]: I0202 17:07:05.208566 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4swpg\" (UniqueName: \"kubernetes.io/projected/f808d0ef-5504-4d6c-9551-28b94cb89838-kube-api-access-4swpg\") pod \"kube-state-metrics-0\" (UID: \"f808d0ef-5504-4d6c-9551-28b94cb89838\") " pod="openstack/kube-state-metrics-0" Feb 02 17:07:05 crc kubenswrapper[4835]: I0202 17:07:05.272462 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 17:07:06 crc kubenswrapper[4835]: I0202 17:07:06.599642 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:07:06 crc kubenswrapper[4835]: I0202 17:07:06.646311 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.927970 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hxh6p"] Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.929331 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.933328 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vxtl2" Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.935071 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.937005 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxh6p"] Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.944183 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.947061 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-248s6"] Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.949070 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:08 crc kubenswrapper[4835]: I0202 17:07:08.954945 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-248s6"] Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.046961 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-log\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047087 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7qt6\" (UniqueName: \"kubernetes.io/projected/e3608c64-7b50-4a57-a0ea-578164629872-kube-api-access-g7qt6\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047119 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89395ae4-5378-4709-a8b2-5b412e709142-scripts\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047158 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-log-ovn\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047180 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3608c64-7b50-4a57-a0ea-578164629872-scripts\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047246 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-etc-ovs\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047296 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-run-ovn\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047328 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-lib\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047349 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e3608c64-7b50-4a57-a0ea-578164629872-combined-ca-bundle\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047366 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3608c64-7b50-4a57-a0ea-578164629872-ovn-controller-tls-certs\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047432 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-run\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047459 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgsdr\" (UniqueName: \"kubernetes.io/projected/89395ae4-5378-4709-a8b2-5b412e709142-kube-api-access-hgsdr\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.047489 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-run\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.148752 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-etc-ovs\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.148836 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-run-ovn\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.148883 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-lib\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.148912 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3608c64-7b50-4a57-a0ea-578164629872-combined-ca-bundle\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.148933 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3608c64-7b50-4a57-a0ea-578164629872-ovn-controller-tls-certs\") pod 
\"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.148971 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-run\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.149004 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgsdr\" (UniqueName: \"kubernetes.io/projected/89395ae4-5378-4709-a8b2-5b412e709142-kube-api-access-hgsdr\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.149047 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-run\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.149073 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-log\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.149100 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7qt6\" (UniqueName: \"kubernetes.io/projected/e3608c64-7b50-4a57-a0ea-578164629872-kube-api-access-g7qt6\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.149123 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89395ae4-5378-4709-a8b2-5b412e709142-scripts\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.149148 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-log-ovn\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.149177 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3608c64-7b50-4a57-a0ea-578164629872-scripts\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.149807 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-etc-ovs\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.150002 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-run-ovn\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.150200 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-lib\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.151147 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-run\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.151427 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-run\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.151543 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3608c64-7b50-4a57-a0ea-578164629872-var-log-ovn\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.151630 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/89395ae4-5378-4709-a8b2-5b412e709142-var-log\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.152877 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.153115 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.157899 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3608c64-7b50-4a57-a0ea-578164629872-combined-ca-bundle\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.161736 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3608c64-7b50-4a57-a0ea-578164629872-scripts\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.164442 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89395ae4-5378-4709-a8b2-5b412e709142-scripts\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.165134 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3608c64-7b50-4a57-a0ea-578164629872-ovn-controller-tls-certs\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.168196 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgsdr\" (UniqueName: \"kubernetes.io/projected/89395ae4-5378-4709-a8b2-5b412e709142-kube-api-access-hgsdr\") pod \"ovn-controller-ovs-248s6\" (UID: \"89395ae4-5378-4709-a8b2-5b412e709142\") " pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.171210 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7qt6\" (UniqueName: \"kubernetes.io/projected/e3608c64-7b50-4a57-a0ea-578164629872-kube-api-access-g7qt6\") pod \"ovn-controller-hxh6p\" (UID: \"e3608c64-7b50-4a57-a0ea-578164629872\") " pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.256203 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vxtl2" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.262991 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:09 crc kubenswrapper[4835]: I0202 17:07:09.282773 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.625593 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.627218 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.629319 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.629715 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.629849 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-slzfp" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.630116 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.630260 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.647684 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.678932 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.678994 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.679046 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d2f7d44-7579-4cd7-867c-77a46a7296cc-config\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.679093 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.679125 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvxzj\" (UniqueName: \"kubernetes.io/projected/4d2f7d44-7579-4cd7-867c-77a46a7296cc-kube-api-access-gvxzj\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.679151 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.679207 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2f7d44-7579-4cd7-867c-77a46a7296cc-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.679226 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4d2f7d44-7579-4cd7-867c-77a46a7296cc-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.781131 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.781191 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.781226 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d2f7d44-7579-4cd7-867c-77a46a7296cc-config\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.781261 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.781305 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvxzj\" (UniqueName: \"kubernetes.io/projected/4d2f7d44-7579-4cd7-867c-77a46a7296cc-kube-api-access-gvxzj\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.781322 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.781365 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2f7d44-7579-4cd7-867c-77a46a7296cc-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.781383 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4d2f7d44-7579-4cd7-867c-77a46a7296cc-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 
17:07:10.781940 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4d2f7d44-7579-4cd7-867c-77a46a7296cc-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.782755 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.783609 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d2f7d44-7579-4cd7-867c-77a46a7296cc-config\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.784215 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2f7d44-7579-4cd7-867c-77a46a7296cc-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.786422 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.786976 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.787799 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2f7d44-7579-4cd7-867c-77a46a7296cc-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.799920 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvxzj\" (UniqueName: \"kubernetes.io/projected/4d2f7d44-7579-4cd7-867c-77a46a7296cc-kube-api-access-gvxzj\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.804348 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"4d2f7d44-7579-4cd7-867c-77a46a7296cc\") " pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: I0202 17:07:10.954992 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:10 crc kubenswrapper[4835]: W0202 17:07:10.968025 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ddf6049_a0d8_429c_b8ce_b52702f4ee60.slice/crio-251360b863ebf09072834186269e1cb759543038e161d94f3103b1309d124262 WatchSource:0}: Error finding container 251360b863ebf09072834186269e1cb759543038e161d94f3103b1309d124262: Status 404 returned error can't find the container with id 251360b863ebf09072834186269e1cb759543038e161d94f3103b1309d124262 Feb 02 17:07:10 crc kubenswrapper[4835]: W0202 17:07:10.971089 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cb4e8f7_3881_4fef_9056_0e2f149aab21.slice/crio-d7c52681deda19177201596fffd71b2de67fbc321c2ace1d46a817b6a21bb383 WatchSource:0}: Error finding container d7c52681deda19177201596fffd71b2de67fbc321c2ace1d46a817b6a21bb383: Status 404 returned error can't find the container with id d7c52681deda19177201596fffd71b2de67fbc321c2ace1d46a817b6a21bb383 Feb 02 17:07:11 crc kubenswrapper[4835]: I0202 17:07:11.009359 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ddf6049-a0d8-429c-b8ce-b52702f4ee60","Type":"ContainerStarted","Data":"251360b863ebf09072834186269e1cb759543038e161d94f3103b1309d124262"} Feb 02 17:07:11 crc kubenswrapper[4835]: I0202 17:07:11.011621 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5cb4e8f7-3881-4fef-9056-0e2f149aab21","Type":"ContainerStarted","Data":"d7c52681deda19177201596fffd71b2de67fbc321c2ace1d46a817b6a21bb383"} Feb 02 17:07:11 crc kubenswrapper[4835]: E0202 17:07:11.801509 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 17:07:11 crc kubenswrapper[4835]: E0202 17:07:11.802031 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-955g8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-xf47z_openstack(95780417-773e-4112-b118-9a15aa9944b7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 17:07:11 crc kubenswrapper[4835]: E0202 17:07:11.803245 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" podUID="95780417-773e-4112-b118-9a15aa9944b7" Feb 02 17:07:11 crc kubenswrapper[4835]: E0202 17:07:11.811703 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 17:07:11 crc kubenswrapper[4835]: E0202 17:07:11.811850 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rrdvh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-k86qz_openstack(3c49cfa8-49a8-45ff-8088-303906cc33a9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 17:07:11 crc kubenswrapper[4835]: E0202 17:07:11.813176 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" podUID="3c49cfa8-49a8-45ff-8088-303906cc33a9" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.046850 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.066683 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.071516 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.071589 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.073211 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-v8pjx" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.074077 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.074304 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.105355 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.105396 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03234de4-e1af-4911-93b4-6da716177367-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.105419 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.105443 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2k9v\" (UniqueName: \"kubernetes.io/projected/03234de4-e1af-4911-93b4-6da716177367-kube-api-access-v2k9v\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.105475 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03234de4-e1af-4911-93b4-6da716177367-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.105507 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.105564 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: 
\"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.105586 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03234de4-e1af-4911-93b4-6da716177367-config\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.211816 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03234de4-e1af-4911-93b4-6da716177367-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.211909 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.212267 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03234de4-e1af-4911-93b4-6da716177367-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.212350 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.212606 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.213016 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2k9v\" (UniqueName: \"kubernetes.io/projected/03234de4-e1af-4911-93b4-6da716177367-kube-api-access-v2k9v\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.213069 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03234de4-e1af-4911-93b4-6da716177367-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.213104 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.213177 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc 
kubenswrapper[4835]: I0202 17:07:12.213202 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03234de4-e1af-4911-93b4-6da716177367-config\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.213244 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.214628 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03234de4-e1af-4911-93b4-6da716177367-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.214689 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03234de4-e1af-4911-93b4-6da716177367-config\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.217566 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.218873 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.219446 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03234de4-e1af-4911-93b4-6da716177367-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.230469 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2k9v\" (UniqueName: \"kubernetes.io/projected/03234de4-e1af-4911-93b4-6da716177367-kube-api-access-v2k9v\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.243582 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"03234de4-e1af-4911-93b4-6da716177367\") " pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: E0202 17:07:12.408920 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cb969a5_3a5e_4bb5_8499_87d9fe1fb148.slice/crio-68ce94e8bcfe087aceec7048e862f2521633981bc60095a67f278c51f659e0db.scope\": RecentStats: unable to find data in memory cache]" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.430390 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.529697 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.545588 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.552979 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxh6p"] Feb 02 17:07:12 crc kubenswrapper[4835]: W0202 17:07:12.556411 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5b7fac0_4bb3_4138_9618_96bf25cbdde5.slice/crio-624eee2e2b4ceb999a037dbdfeadaf1b10d3d1de96e5fbd8d7d28641b4da4eeb WatchSource:0}: Error finding container 624eee2e2b4ceb999a037dbdfeadaf1b10d3d1de96e5fbd8d7d28641b4da4eeb: Status 404 returned error can't find the container with id 624eee2e2b4ceb999a037dbdfeadaf1b10d3d1de96e5fbd8d7d28641b4da4eeb Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.559249 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.566555 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:07:12 crc kubenswrapper[4835]: W0202 17:07:12.573183 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeec68dd7_cf6a_45a4_a036_19bcf050c892.slice/crio-02d037aba4a3840bef07983771d6c47fff1bf952e3c0e3f951fc691fde341533 WatchSource:0}: Error finding container 02d037aba4a3840bef07983771d6c47fff1bf952e3c0e3f951fc691fde341533: Status 404 returned error can't find the container with id 02d037aba4a3840bef07983771d6c47fff1bf952e3c0e3f951fc691fde341533 Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.577498 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.620630 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95780417-773e-4112-b118-9a15aa9944b7-config\") pod \"95780417-773e-4112-b118-9a15aa9944b7\" (UID: \"95780417-773e-4112-b118-9a15aa9944b7\") " Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.620725 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-dns-svc\") pod \"3c49cfa8-49a8-45ff-8088-303906cc33a9\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.620779 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-955g8\" (UniqueName: \"kubernetes.io/projected/95780417-773e-4112-b118-9a15aa9944b7-kube-api-access-955g8\") pod \"95780417-773e-4112-b118-9a15aa9944b7\" (UID: \"95780417-773e-4112-b118-9a15aa9944b7\") " Feb 02 17:07:12 crc 
kubenswrapper[4835]: I0202 17:07:12.620828 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrdvh\" (UniqueName: \"kubernetes.io/projected/3c49cfa8-49a8-45ff-8088-303906cc33a9-kube-api-access-rrdvh\") pod \"3c49cfa8-49a8-45ff-8088-303906cc33a9\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.620878 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-config\") pod \"3c49cfa8-49a8-45ff-8088-303906cc33a9\" (UID: \"3c49cfa8-49a8-45ff-8088-303906cc33a9\") " Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.621421 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95780417-773e-4112-b118-9a15aa9944b7-config" (OuterVolumeSpecName: "config") pod "95780417-773e-4112-b118-9a15aa9944b7" (UID: "95780417-773e-4112-b118-9a15aa9944b7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.621712 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-config" (OuterVolumeSpecName: "config") pod "3c49cfa8-49a8-45ff-8088-303906cc33a9" (UID: "3c49cfa8-49a8-45ff-8088-303906cc33a9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.622476 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3c49cfa8-49a8-45ff-8088-303906cc33a9" (UID: "3c49cfa8-49a8-45ff-8088-303906cc33a9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.626109 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c49cfa8-49a8-45ff-8088-303906cc33a9-kube-api-access-rrdvh" (OuterVolumeSpecName: "kube-api-access-rrdvh") pod "3c49cfa8-49a8-45ff-8088-303906cc33a9" (UID: "3c49cfa8-49a8-45ff-8088-303906cc33a9"). InnerVolumeSpecName "kube-api-access-rrdvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.627109 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95780417-773e-4112-b118-9a15aa9944b7-kube-api-access-955g8" (OuterVolumeSpecName: "kube-api-access-955g8") pod "95780417-773e-4112-b118-9a15aa9944b7" (UID: "95780417-773e-4112-b118-9a15aa9944b7"). InnerVolumeSpecName "kube-api-access-955g8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.722563 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.722603 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-955g8\" (UniqueName: \"kubernetes.io/projected/95780417-773e-4112-b118-9a15aa9944b7-kube-api-access-955g8\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.722619 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrdvh\" (UniqueName: \"kubernetes.io/projected/3c49cfa8-49a8-45ff-8088-303906cc33a9-kube-api-access-rrdvh\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.722631 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c49cfa8-49a8-45ff-8088-303906cc33a9-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.722646 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95780417-773e-4112-b118-9a15aa9944b7-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:12 crc kubenswrapper[4835]: I0202 17:07:12.797658 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-248s6"] Feb 02 17:07:12 crc kubenswrapper[4835]: W0202 17:07:12.810439 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89395ae4_5378_4709_a8b2_5b412e709142.slice/crio-8e83717d595b03ab34b0302a422e03ef7cce5cbca1d260dbcc60b6581d6eaa1c WatchSource:0}: Error finding container 8e83717d595b03ab34b0302a422e03ef7cce5cbca1d260dbcc60b6581d6eaa1c: Status 404 returned error can't find the container with id 8e83717d595b03ab34b0302a422e03ef7cce5cbca1d260dbcc60b6581d6eaa1c Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.024792 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 17:07:13 crc kubenswrapper[4835]: W0202 17:07:13.034093 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03234de4_e1af_4911_93b4_6da716177367.slice/crio-2d029d9db68f1b609306a714df76f7b61991b8eaaf6bc3d532615c900d8f1697 WatchSource:0}: Error finding container 2d029d9db68f1b609306a714df76f7b61991b8eaaf6bc3d532615c900d8f1697: Status 404 returned error can't find the container with id 2d029d9db68f1b609306a714df76f7b61991b8eaaf6bc3d532615c900d8f1697 Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.053843 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxh6p" event={"ID":"e3608c64-7b50-4a57-a0ea-578164629872","Type":"ContainerStarted","Data":"c473eea606a30e8464d28b148c5ac108c89e204ef3619dc81e28ec86c2fc0557"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.059616 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"03234de4-e1af-4911-93b4-6da716177367","Type":"ContainerStarted","Data":"2d029d9db68f1b609306a714df76f7b61991b8eaaf6bc3d532615c900d8f1697"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.061429 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"b630cc52-70e1-4580-8d73-df2507194554","Type":"ContainerStarted","Data":"8c88fbb92c58ea89e80c4f4d21e621b584a1d638f8b5104839947b04081065a2"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.063089 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" event={"ID":"3c49cfa8-49a8-45ff-8088-303906cc33a9","Type":"ContainerDied","Data":"5470bf979217cfb2dc3a61302eac24bdbb776cfaa40b08377a6764b08ae2c25b"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.063165 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-k86qz" Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.066952 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"eec68dd7-cf6a-45a4-a036-19bcf050c892","Type":"ContainerStarted","Data":"02d037aba4a3840bef07983771d6c47fff1bf952e3c0e3f951fc691fde341533"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.071898 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f808d0ef-5504-4d6c-9551-28b94cb89838","Type":"ContainerStarted","Data":"c49868b3586ba6377537a195ec15267de926440fd412e3d532ffdadd48ffc290"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.074533 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"a5b7fac0-4bb3-4138-9618-96bf25cbdde5","Type":"ContainerStarted","Data":"624eee2e2b4ceb999a037dbdfeadaf1b10d3d1de96e5fbd8d7d28641b4da4eeb"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.076888 4835 generic.go:334] "Generic (PLEG): container finished" podID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" containerID="c08838c20d537507a71f2a78cf6427c28f0f81c5a078adf82abb5a022e4492a8" exitCode=0 Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.077045 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" event={"ID":"18c2aaa4-d81b-4b79-a14c-2ac5041aa544","Type":"ContainerDied","Data":"c08838c20d537507a71f2a78cf6427c28f0f81c5a078adf82abb5a022e4492a8"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.078335 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-248s6" event={"ID":"89395ae4-5378-4709-a8b2-5b412e709142","Type":"ContainerStarted","Data":"8e83717d595b03ab34b0302a422e03ef7cce5cbca1d260dbcc60b6581d6eaa1c"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.080294 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" event={"ID":"95780417-773e-4112-b118-9a15aa9944b7","Type":"ContainerDied","Data":"91c3b7e70d444b4f7e45a436bd73bf911dabfd8f7d348472c1a800f2e973737d"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.080310 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xf47z" Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.103181 4835 generic.go:334] "Generic (PLEG): container finished" podID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" containerID="68ce94e8bcfe087aceec7048e862f2521633981bc60095a67f278c51f659e0db" exitCode=0 Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.103287 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" event={"ID":"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148","Type":"ContainerDied","Data":"68ce94e8bcfe087aceec7048e862f2521633981bc60095a67f278c51f659e0db"} Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.210198 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-k86qz"] Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.216446 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-k86qz"] Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.266019 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xf47z"] Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.271190 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xf47z"] Feb 02 17:07:13 crc kubenswrapper[4835]: I0202 17:07:13.759963 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.869948 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.870331 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.938312 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-c6jjz"] Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.939294 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.942155 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.942767 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-c6jjz"] Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.969599 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-ovn-rundir\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.969660 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.969708 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-combined-ca-bundle\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.969756 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-ovs-rundir\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.969849 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgfpf\" (UniqueName: \"kubernetes.io/projected/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-kube-api-access-pgfpf\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:14 crc kubenswrapper[4835]: I0202 17:07:14.969880 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-config\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.071645 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b6c75"] Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.073029 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgfpf\" (UniqueName: \"kubernetes.io/projected/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-kube-api-access-pgfpf\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.073089 4835 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-config\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.073151 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-ovn-rundir\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.073178 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.073225 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-combined-ca-bundle\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.073261 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-ovs-rundir\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.073675 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-ovs-rundir\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.073738 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-ovn-rundir\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.075610 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-config\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.080653 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-combined-ca-bundle\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.097859 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.103010 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgfpf\" (UniqueName: \"kubernetes.io/projected/9ce87c37-0b7a-4a7a-b90f-f34aaa078035-kube-api-access-pgfpf\") pod \"ovn-controller-metrics-c6jjz\" (UID: \"9ce87c37-0b7a-4a7a-b90f-f34aaa078035\") " pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.103077 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xdwcl"] Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.104358 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.107753 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.119874 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xdwcl"] Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.175065 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.175154 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-config\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.175180 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvm6h\" (UniqueName: \"kubernetes.io/projected/f051b4af-ab78-4c61-915b-e007e40aa082-kube-api-access-xvm6h\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.175305 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.205382 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c49cfa8-49a8-45ff-8088-303906cc33a9" path="/var/lib/kubelet/pods/3c49cfa8-49a8-45ff-8088-303906cc33a9/volumes" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.205837 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95780417-773e-4112-b118-9a15aa9944b7" path="/var/lib/kubelet/pods/95780417-773e-4112-b118-9a15aa9944b7/volumes" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.257574 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-c6jjz" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.278771 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.278943 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.278996 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-config\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.279023 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvm6h\" (UniqueName: \"kubernetes.io/projected/f051b4af-ab78-4c61-915b-e007e40aa082-kube-api-access-xvm6h\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.279828 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.280885 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-config\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.281791 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.300444 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvm6h\" (UniqueName: \"kubernetes.io/projected/f051b4af-ab78-4c61-915b-e007e40aa082-kube-api-access-xvm6h\") pod \"dnsmasq-dns-6bc7876d45-xdwcl\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.381184 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8pdms"] Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.410881 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-smzl5"] Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.412050 4835 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.420764 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.428968 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-smzl5"] Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.477218 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.481840 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.481882 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-dns-svc\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.481910 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zl7r\" (UniqueName: \"kubernetes.io/projected/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-kube-api-access-5zl7r\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.481960 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.481985 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-config\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.583351 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.583415 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-dns-svc\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.583451 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zl7r\" 
(UniqueName: \"kubernetes.io/projected/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-kube-api-access-5zl7r\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.583511 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.583542 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-config\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.584443 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.584585 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.584588 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-dns-svc\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.584777 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-config\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.603466 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zl7r\" (UniqueName: \"kubernetes.io/projected/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-kube-api-access-5zl7r\") pod \"dnsmasq-dns-8554648995-smzl5\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:15 crc kubenswrapper[4835]: I0202 17:07:15.730265 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:17 crc kubenswrapper[4835]: W0202 17:07:17.720518 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d2f7d44_7579_4cd7_867c_77a46a7296cc.slice/crio-b5c0a6faa796dd14b192409922c11dd8a708a7062fa75f41a7be155acda22340 WatchSource:0}: Error finding container b5c0a6faa796dd14b192409922c11dd8a708a7062fa75f41a7be155acda22340: Status 404 returned error can't find the container with id b5c0a6faa796dd14b192409922c11dd8a708a7062fa75f41a7be155acda22340 Feb 02 17:07:18 crc kubenswrapper[4835]: I0202 17:07:18.144035 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"4d2f7d44-7579-4cd7-867c-77a46a7296cc","Type":"ContainerStarted","Data":"b5c0a6faa796dd14b192409922c11dd8a708a7062fa75f41a7be155acda22340"} Feb 02 17:07:20 crc kubenswrapper[4835]: I0202 17:07:20.763343 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xdwcl"] Feb 02 17:07:20 crc kubenswrapper[4835]: W0202 17:07:20.848505 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf051b4af_ab78_4c61_915b_e007e40aa082.slice/crio-fa37d87b2c72b5d23b62069a85ef5548c209189a2445235f05e303cc64364b69 WatchSource:0}: Error finding container fa37d87b2c72b5d23b62069a85ef5548c209189a2445235f05e303cc64364b69: Status 404 returned error can't find the container with id fa37d87b2c72b5d23b62069a85ef5548c209189a2445235f05e303cc64364b69 Feb 02 17:07:21 crc kubenswrapper[4835]: I0202 17:07:21.080441 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-c6jjz"] Feb 02 17:07:21 crc kubenswrapper[4835]: I0202 17:07:21.155739 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-smzl5"] Feb 02 17:07:21 crc kubenswrapper[4835]: W0202 17:07:21.165067 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ce87c37_0b7a_4a7a_b90f_f34aaa078035.slice/crio-5ac2e1bc8fb9d22f22bd33546d6e79a140e3b881050566d364f0d66c960c81fe WatchSource:0}: Error finding container 5ac2e1bc8fb9d22f22bd33546d6e79a140e3b881050566d364f0d66c960c81fe: Status 404 returned error can't find the container with id 5ac2e1bc8fb9d22f22bd33546d6e79a140e3b881050566d364f0d66c960c81fe Feb 02 17:07:21 crc kubenswrapper[4835]: I0202 17:07:21.211832 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" podUID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" containerName="dnsmasq-dns" containerID="cri-o://9313fde51b0b4bbfd3ea105918e4705d3a9e2bc6bf0d50cd1b07b59a873299f0" gracePeriod=10 Feb 02 17:07:21 crc kubenswrapper[4835]: W0202 17:07:21.213595 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f09b478_2bc6_4e0c_958f_7cab0354d7d4.slice/crio-c7bc7424aea84cc93d98101d400551c30b50e61ed97ed53939a0f4fd0717233b WatchSource:0}: Error finding container c7bc7424aea84cc93d98101d400551c30b50e61ed97ed53939a0f4fd0717233b: Status 404 returned error can't find the container with id c7bc7424aea84cc93d98101d400551c30b50e61ed97ed53939a0f4fd0717233b Feb 02 17:07:21 crc kubenswrapper[4835]: I0202 17:07:21.221241 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:07:21 crc kubenswrapper[4835]: I0202 17:07:21.221265 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" event={"ID":"18c2aaa4-d81b-4b79-a14c-2ac5041aa544","Type":"ContainerStarted","Data":"9313fde51b0b4bbfd3ea105918e4705d3a9e2bc6bf0d50cd1b07b59a873299f0"} Feb 02 17:07:21 crc kubenswrapper[4835]: I0202 17:07:21.221339 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" event={"ID":"f051b4af-ab78-4c61-915b-e007e40aa082","Type":"ContainerStarted","Data":"fa37d87b2c72b5d23b62069a85ef5548c209189a2445235f05e303cc64364b69"} Feb 02 17:07:21 crc kubenswrapper[4835]: I0202 17:07:21.234195 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" podStartSLOduration=11.023590708 podStartE2EDuration="23.234154835s" podCreationTimestamp="2026-02-02 17:06:58 +0000 UTC" firstStartedPulling="2026-02-02 17:06:59.740510455 +0000 UTC m=+1011.362114535" lastFinishedPulling="2026-02-02 17:07:11.951074592 +0000 UTC m=+1023.572678662" observedRunningTime="2026-02-02 17:07:21.226876039 +0000 UTC m=+1032.848480139" watchObservedRunningTime="2026-02-02 17:07:21.234154835 +0000 UTC m=+1032.855758915" Feb 02 17:07:22 crc kubenswrapper[4835]: I0202 17:07:22.225136 4835 generic.go:334] "Generic (PLEG): container finished" podID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" containerID="9313fde51b0b4bbfd3ea105918e4705d3a9e2bc6bf0d50cd1b07b59a873299f0" exitCode=0 Feb 02 17:07:22 crc kubenswrapper[4835]: I0202 17:07:22.225221 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" event={"ID":"18c2aaa4-d81b-4b79-a14c-2ac5041aa544","Type":"ContainerDied","Data":"9313fde51b0b4bbfd3ea105918e4705d3a9e2bc6bf0d50cd1b07b59a873299f0"} Feb 02 17:07:22 crc kubenswrapper[4835]: I0202 17:07:22.227175 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-c6jjz" event={"ID":"9ce87c37-0b7a-4a7a-b90f-f34aaa078035","Type":"ContainerStarted","Data":"5ac2e1bc8fb9d22f22bd33546d6e79a140e3b881050566d364f0d66c960c81fe"} Feb 02 17:07:22 crc kubenswrapper[4835]: I0202 17:07:22.228467 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-smzl5" event={"ID":"0f09b478-2bc6-4e0c-958f-7cab0354d7d4","Type":"ContainerStarted","Data":"c7bc7424aea84cc93d98101d400551c30b50e61ed97ed53939a0f4fd0717233b"} Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.253815 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-248s6" event={"ID":"89395ae4-5378-4709-a8b2-5b412e709142","Type":"ContainerStarted","Data":"f3039db9cb2a5218de650d0372dc584953095d8ebca3de875995ca591acc04e4"} Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.255391 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxh6p" event={"ID":"e3608c64-7b50-4a57-a0ea-578164629872","Type":"ContainerStarted","Data":"0e847ad344856e46ad059d0b9b71c238239020aaca842cf0d77457dd7549e196"} Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.797512 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.885540 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-dns-svc\") pod \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.885722 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7n9c\" (UniqueName: \"kubernetes.io/projected/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-kube-api-access-z7n9c\") pod \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.885764 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-config\") pod \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\" (UID: \"18c2aaa4-d81b-4b79-a14c-2ac5041aa544\") " Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.901130 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-kube-api-access-z7n9c" (OuterVolumeSpecName: "kube-api-access-z7n9c") pod "18c2aaa4-d81b-4b79-a14c-2ac5041aa544" (UID: "18c2aaa4-d81b-4b79-a14c-2ac5041aa544"). InnerVolumeSpecName "kube-api-access-z7n9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.941360 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-config" (OuterVolumeSpecName: "config") pod "18c2aaa4-d81b-4b79-a14c-2ac5041aa544" (UID: "18c2aaa4-d81b-4b79-a14c-2ac5041aa544"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.941901 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "18c2aaa4-d81b-4b79-a14c-2ac5041aa544" (UID: "18c2aaa4-d81b-4b79-a14c-2ac5041aa544"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.986995 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.987037 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7n9c\" (UniqueName: \"kubernetes.io/projected/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-kube-api-access-z7n9c\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:25 crc kubenswrapper[4835]: I0202 17:07:25.987052 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18c2aaa4-d81b-4b79-a14c-2ac5041aa544-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.267062 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" event={"ID":"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148","Type":"ContainerStarted","Data":"b3ebfd8ee17d8853d98b4e54ca38b2064b03000418bc1fd6b7a36d44156166f5"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.267138 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" podUID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" containerName="dnsmasq-dns" containerID="cri-o://b3ebfd8ee17d8853d98b4e54ca38b2064b03000418bc1fd6b7a36d44156166f5" gracePeriod=10 Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.267210 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.269133 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"a5b7fac0-4bb3-4138-9618-96bf25cbdde5","Type":"ContainerStarted","Data":"fedb82567be4884782588371f62e296d7ef7df177ed6eaa1f7579292cb2edd4e"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.269753 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.272305 4835 generic.go:334] "Generic (PLEG): container finished" podID="f051b4af-ab78-4c61-915b-e007e40aa082" containerID="10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52" exitCode=0 Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.272380 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" event={"ID":"f051b4af-ab78-4c61-915b-e007e40aa082","Type":"ContainerDied","Data":"10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.274104 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"03234de4-e1af-4911-93b4-6da716177367","Type":"ContainerStarted","Data":"f24b983e1adea4c3f8aa5377cdc282893d0a44971cdc3301964dfeeb3ff86311"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.275573 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b630cc52-70e1-4580-8d73-df2507194554","Type":"ContainerStarted","Data":"f9a011dc90d06861e9f24da8aff4bfa84c1e7efd785dfbda73e63f3e4d726357"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.277292 4835 generic.go:334] "Generic (PLEG): container finished" podID="89395ae4-5378-4709-a8b2-5b412e709142" 
containerID="f3039db9cb2a5218de650d0372dc584953095d8ebca3de875995ca591acc04e4" exitCode=0 Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.277366 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-248s6" event={"ID":"89395ae4-5378-4709-a8b2-5b412e709142","Type":"ContainerDied","Data":"f3039db9cb2a5218de650d0372dc584953095d8ebca3de875995ca591acc04e4"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.282749 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5cb4e8f7-3881-4fef-9056-0e2f149aab21","Type":"ContainerStarted","Data":"62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.290215 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"eec68dd7-cf6a-45a4-a036-19bcf050c892","Type":"ContainerStarted","Data":"46c24477a2fe94cde8326d8fb3548afeaabe6e7e3f8db7ccc414464003caedf7"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.291538 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" podStartSLOduration=17.939493309 podStartE2EDuration="27.291523774s" podCreationTimestamp="2026-02-02 17:06:59 +0000 UTC" firstStartedPulling="2026-02-02 17:07:02.684068965 +0000 UTC m=+1014.305673045" lastFinishedPulling="2026-02-02 17:07:12.03609943 +0000 UTC m=+1023.657703510" observedRunningTime="2026-02-02 17:07:26.283743323 +0000 UTC m=+1037.905347413" watchObservedRunningTime="2026-02-02 17:07:26.291523774 +0000 UTC m=+1037.913127854" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.295763 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f808d0ef-5504-4d6c-9551-28b94cb89838","Type":"ContainerStarted","Data":"6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.295860 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.297041 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"4d2f7d44-7579-4cd7-867c-77a46a7296cc","Type":"ContainerStarted","Data":"9e23fedcae91b7eb304779db42e6070b007e20b47ae6b4a72d7b25a7d1ca62f6"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.299709 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.300316 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b6c75" event={"ID":"18c2aaa4-d81b-4b79-a14c-2ac5041aa544","Type":"ContainerDied","Data":"c60a431017f3e708df9ae087e47757cdb5bc752b5dea5a7ac760c886d7fe6441"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.300358 4835 scope.go:117] "RemoveContainer" containerID="9313fde51b0b4bbfd3ea105918e4705d3a9e2bc6bf0d50cd1b07b59a873299f0" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.301795 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ddf6049-a0d8-429c-b8ce-b52702f4ee60","Type":"ContainerStarted","Data":"d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.308861 4835 generic.go:334] "Generic (PLEG): container finished" podID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" containerID="0857dc59876a83aa4ce94f8bee5c02f6f740dfa1cb8c49b3120500b4db3ede6a" exitCode=0 Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.309509 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-smzl5" event={"ID":"0f09b478-2bc6-4e0c-958f-7cab0354d7d4","Type":"ContainerDied","Data":"0857dc59876a83aa4ce94f8bee5c02f6f740dfa1cb8c49b3120500b4db3ede6a"} Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.309538 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-hxh6p" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.351050 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=15.581436923 podStartE2EDuration="23.351025449s" podCreationTimestamp="2026-02-02 17:07:03 +0000 UTC" firstStartedPulling="2026-02-02 17:07:12.568553731 +0000 UTC m=+1024.190157821" lastFinishedPulling="2026-02-02 17:07:20.338142267 +0000 UTC m=+1031.959746347" observedRunningTime="2026-02-02 17:07:26.350569076 +0000 UTC m=+1037.972173156" watchObservedRunningTime="2026-02-02 17:07:26.351025449 +0000 UTC m=+1037.972629529" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.429126 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.297074774 podStartE2EDuration="22.429106431s" podCreationTimestamp="2026-02-02 17:07:04 +0000 UTC" firstStartedPulling="2026-02-02 17:07:12.557488037 +0000 UTC m=+1024.179092117" lastFinishedPulling="2026-02-02 17:07:25.689519694 +0000 UTC m=+1037.311123774" observedRunningTime="2026-02-02 17:07:26.41074418 +0000 UTC m=+1038.032348260" watchObservedRunningTime="2026-02-02 17:07:26.429106431 +0000 UTC m=+1038.050710511" Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.478321 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b6c75"] Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.486613 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b6c75"] Feb 02 17:07:26 crc kubenswrapper[4835]: I0202 17:07:26.525114 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-hxh6p" podStartSLOduration=10.455140414 podStartE2EDuration="18.525076857s" podCreationTimestamp="2026-02-02 17:07:08 +0000 UTC" firstStartedPulling="2026-02-02 17:07:12.565416342 +0000 UTC m=+1024.187020422" 
lastFinishedPulling="2026-02-02 17:07:20.635352775 +0000 UTC m=+1032.256956865" observedRunningTime="2026-02-02 17:07:26.518014658 +0000 UTC m=+1038.139618748" watchObservedRunningTime="2026-02-02 17:07:26.525076857 +0000 UTC m=+1038.146680937" Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.203725 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" path="/var/lib/kubelet/pods/18c2aaa4-d81b-4b79-a14c-2ac5041aa544/volumes" Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.263912 4835 scope.go:117] "RemoveContainer" containerID="c08838c20d537507a71f2a78cf6427c28f0f81c5a078adf82abb5a022e4492a8" Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.327111 4835 generic.go:334] "Generic (PLEG): container finished" podID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" containerID="b3ebfd8ee17d8853d98b4e54ca38b2064b03000418bc1fd6b7a36d44156166f5" exitCode=0 Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.327190 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" event={"ID":"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148","Type":"ContainerDied","Data":"b3ebfd8ee17d8853d98b4e54ca38b2064b03000418bc1fd6b7a36d44156166f5"} Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.767686 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.847517 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-dns-svc\") pod \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.847576 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-config\") pod \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.847794 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtspr\" (UniqueName: \"kubernetes.io/projected/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-kube-api-access-xtspr\") pod \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\" (UID: \"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148\") " Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.852870 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-kube-api-access-xtspr" (OuterVolumeSpecName: "kube-api-access-xtspr") pod "6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" (UID: "6cb969a5-3a5e-4bb5-8499-87d9fe1fb148"). InnerVolumeSpecName "kube-api-access-xtspr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.949643 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtspr\" (UniqueName: \"kubernetes.io/projected/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-kube-api-access-xtspr\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.965010 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" (UID: "6cb969a5-3a5e-4bb5-8499-87d9fe1fb148"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:27 crc kubenswrapper[4835]: I0202 17:07:27.968711 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-config" (OuterVolumeSpecName: "config") pod "6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" (UID: "6cb969a5-3a5e-4bb5-8499-87d9fe1fb148"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.051706 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.051931 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.343475 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-248s6" event={"ID":"89395ae4-5378-4709-a8b2-5b412e709142","Type":"ContainerStarted","Data":"9b3a0430f7f685ee74fa9f3e42719b3fd58036949f563466aaab498c37a9ca8d"} Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.345140 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-248s6" event={"ID":"89395ae4-5378-4709-a8b2-5b412e709142","Type":"ContainerStarted","Data":"6b41ada1f77e980ccdfa60f3060dbc8cabca366cb15bdd9cfcb8859f7a4acfd7"} Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.345450 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.348252 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-c6jjz" event={"ID":"9ce87c37-0b7a-4a7a-b90f-f34aaa078035","Type":"ContainerStarted","Data":"4f8add9e6c36f2b206cc96467616caf52fedd63b5a60625e9d42a381eb904f4f"} Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.350739 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-smzl5" event={"ID":"0f09b478-2bc6-4e0c-958f-7cab0354d7d4","Type":"ContainerStarted","Data":"b7065bb9bb259c8dd1bbf1cdd83e2744622006063b844fcaf37ccc773b58122d"} Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.350913 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.353600 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"4d2f7d44-7579-4cd7-867c-77a46a7296cc","Type":"ContainerStarted","Data":"bef15af2b6874af14c8dc35c4a017e7a1e307b546376af1d462c8a495740227d"} Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.356497 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" event={"ID":"6cb969a5-3a5e-4bb5-8499-87d9fe1fb148","Type":"ContainerDied","Data":"e774724d2b4c9a904d5c7b19ba48c339ab15d29780c03dc47366ed434a9a65f5"} Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.356527 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-8pdms" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.356545 4835 scope.go:117] "RemoveContainer" containerID="b3ebfd8ee17d8853d98b4e54ca38b2064b03000418bc1fd6b7a36d44156166f5" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.359520 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" event={"ID":"f051b4af-ab78-4c61-915b-e007e40aa082","Type":"ContainerStarted","Data":"2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c"} Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.360578 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.375175 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"03234de4-e1af-4911-93b4-6da716177367","Type":"ContainerStarted","Data":"285799ddd85f5475dc2746ab282e96b7855c259da4432922fd39352996e46d34"} Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.382177 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-248s6" podStartSLOduration=12.448106694 podStartE2EDuration="20.381319845s" podCreationTimestamp="2026-02-02 17:07:08 +0000 UTC" firstStartedPulling="2026-02-02 17:07:12.812196181 +0000 UTC m=+1024.433800261" lastFinishedPulling="2026-02-02 17:07:20.745409332 +0000 UTC m=+1032.367013412" observedRunningTime="2026-02-02 17:07:28.365621001 +0000 UTC m=+1039.987225101" watchObservedRunningTime="2026-02-02 17:07:28.381319845 +0000 UTC m=+1040.002923955" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.387371 4835 scope.go:117] "RemoveContainer" containerID="68ce94e8bcfe087aceec7048e862f2521633981bc60095a67f278c51f659e0db" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.398633 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-smzl5" podStartSLOduration=13.398610943 podStartE2EDuration="13.398610943s" podCreationTimestamp="2026-02-02 17:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:07:28.39813101 +0000 UTC m=+1040.019735100" watchObservedRunningTime="2026-02-02 17:07:28.398610943 +0000 UTC m=+1040.020215043" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.437305 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-c6jjz" podStartSLOduration=8.024961077 podStartE2EDuration="14.437282596s" podCreationTimestamp="2026-02-02 17:07:14 +0000 UTC" firstStartedPulling="2026-02-02 17:07:21.206847452 +0000 UTC m=+1032.828451542" lastFinishedPulling="2026-02-02 17:07:27.619168981 +0000 UTC m=+1039.240773061" observedRunningTime="2026-02-02 17:07:28.425303598 +0000 UTC m=+1040.046907688" 
watchObservedRunningTime="2026-02-02 17:07:28.437282596 +0000 UTC m=+1040.058886676" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.454190 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" podStartSLOduration=13.454166243 podStartE2EDuration="13.454166243s" podCreationTimestamp="2026-02-02 17:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:07:28.45085496 +0000 UTC m=+1040.072459040" watchObservedRunningTime="2026-02-02 17:07:28.454166243 +0000 UTC m=+1040.075770323" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.479331 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.527392847 podStartE2EDuration="19.479312074s" podCreationTimestamp="2026-02-02 17:07:09 +0000 UTC" firstStartedPulling="2026-02-02 17:07:17.722181026 +0000 UTC m=+1029.343785156" lastFinishedPulling="2026-02-02 17:07:27.674100303 +0000 UTC m=+1039.295704383" observedRunningTime="2026-02-02 17:07:28.472750198 +0000 UTC m=+1040.094354278" watchObservedRunningTime="2026-02-02 17:07:28.479312074 +0000 UTC m=+1040.100916154" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.506047 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=2.900457865 podStartE2EDuration="17.506023048s" podCreationTimestamp="2026-02-02 17:07:11 +0000 UTC" firstStartedPulling="2026-02-02 17:07:13.037818892 +0000 UTC m=+1024.659422972" lastFinishedPulling="2026-02-02 17:07:27.643384075 +0000 UTC m=+1039.264988155" observedRunningTime="2026-02-02 17:07:28.500591875 +0000 UTC m=+1040.122195955" watchObservedRunningTime="2026-02-02 17:07:28.506023048 +0000 UTC m=+1040.127627128" Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.539362 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8pdms"] Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.546800 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8pdms"] Feb 02 17:07:28 crc kubenswrapper[4835]: I0202 17:07:28.956035 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:29 crc kubenswrapper[4835]: I0202 17:07:29.198487 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" path="/var/lib/kubelet/pods/6cb969a5-3a5e-4bb5-8499-87d9fe1fb148/volumes" Feb 02 17:07:29 crc kubenswrapper[4835]: I0202 17:07:29.283549 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:30 crc kubenswrapper[4835]: I0202 17:07:30.399049 4835 generic.go:334] "Generic (PLEG): container finished" podID="b630cc52-70e1-4580-8d73-df2507194554" containerID="f9a011dc90d06861e9f24da8aff4bfa84c1e7efd785dfbda73e63f3e4d726357" exitCode=0 Feb 02 17:07:30 crc kubenswrapper[4835]: I0202 17:07:30.399150 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b630cc52-70e1-4580-8d73-df2507194554","Type":"ContainerDied","Data":"f9a011dc90d06861e9f24da8aff4bfa84c1e7efd785dfbda73e63f3e4d726357"} Feb 02 17:07:30 crc kubenswrapper[4835]: I0202 17:07:30.402149 4835 generic.go:334] "Generic (PLEG): container finished" podID="eec68dd7-cf6a-45a4-a036-19bcf050c892" 
containerID="46c24477a2fe94cde8326d8fb3548afeaabe6e7e3f8db7ccc414464003caedf7" exitCode=0 Feb 02 17:07:30 crc kubenswrapper[4835]: I0202 17:07:30.402202 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"eec68dd7-cf6a-45a4-a036-19bcf050c892","Type":"ContainerDied","Data":"46c24477a2fe94cde8326d8fb3548afeaabe6e7e3f8db7ccc414464003caedf7"} Feb 02 17:07:30 crc kubenswrapper[4835]: I0202 17:07:30.431053 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:30 crc kubenswrapper[4835]: I0202 17:07:30.492935 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:30 crc kubenswrapper[4835]: I0202 17:07:30.956004 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.414400 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"eec68dd7-cf6a-45a4-a036-19bcf050c892","Type":"ContainerStarted","Data":"b35590ae5788ec4bb89353905cd21cc9e2a87a24945084a862ed617099aac2df"} Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.417050 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b630cc52-70e1-4580-8d73-df2507194554","Type":"ContainerStarted","Data":"12e8ec6dce728f809246a1fa3a76d72a2844245f0aa77ac85410f29cf2ed75a9"} Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.417556 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.452784 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=23.928678173 podStartE2EDuration="31.452745106s" podCreationTimestamp="2026-02-02 17:07:00 +0000 UTC" firstStartedPulling="2026-02-02 17:07:12.577071182 +0000 UTC m=+1024.198675262" lastFinishedPulling="2026-02-02 17:07:20.101138115 +0000 UTC m=+1031.722742195" observedRunningTime="2026-02-02 17:07:31.443986358 +0000 UTC m=+1043.065590438" watchObservedRunningTime="2026-02-02 17:07:31.452745106 +0000 UTC m=+1043.074349186" Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.477108 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.487718 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=22.072591393 podStartE2EDuration="30.487692023s" podCreationTimestamp="2026-02-02 17:07:01 +0000 UTC" firstStartedPulling="2026-02-02 17:07:12.329475459 +0000 UTC m=+1023.951079539" lastFinishedPulling="2026-02-02 17:07:20.744576089 +0000 UTC m=+1032.366180169" observedRunningTime="2026-02-02 17:07:31.477507635 +0000 UTC m=+1043.099111715" watchObservedRunningTime="2026-02-02 17:07:31.487692023 +0000 UTC m=+1043.109296103" Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.841022 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.841068 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Feb 02 17:07:31 crc kubenswrapper[4835]: I0202 17:07:31.992152 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.458248 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.717361 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Feb 02 17:07:32 crc kubenswrapper[4835]: E0202 17:07:32.717764 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" containerName="dnsmasq-dns" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.717780 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" containerName="dnsmasq-dns" Feb 02 17:07:32 crc kubenswrapper[4835]: E0202 17:07:32.717801 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" containerName="dnsmasq-dns" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.717808 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" containerName="dnsmasq-dns" Feb 02 17:07:32 crc kubenswrapper[4835]: E0202 17:07:32.717825 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" containerName="init" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.717833 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" containerName="init" Feb 02 17:07:32 crc kubenswrapper[4835]: E0202 17:07:32.717850 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" containerName="init" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.717857 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" containerName="init" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.718015 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="18c2aaa4-d81b-4b79-a14c-2ac5041aa544" containerName="dnsmasq-dns" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.718026 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cb969a5-3a5e-4bb5-8499-87d9fe1fb148" containerName="dnsmasq-dns" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.718985 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.723445 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-dsjvs" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.723863 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.724813 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.726386 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.735035 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.826704 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.826808 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-config\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.826850 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.826900 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.826924 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltkjz\" (UniqueName: \"kubernetes.io/projected/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-kube-api-access-ltkjz\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.826956 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-scripts\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.826991 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: 
I0202 17:07:32.927880 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.927972 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-config\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.928004 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.928027 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.928047 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltkjz\" (UniqueName: \"kubernetes.io/projected/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-kube-api-access-ltkjz\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.928073 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-scripts\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.928105 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.928709 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.928966 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-config\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.929145 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-scripts\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.933839 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.934667 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.940707 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:32 crc kubenswrapper[4835]: I0202 17:07:32.949039 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltkjz\" (UniqueName: \"kubernetes.io/projected/a9b8ae61-599b-4f97-84a8-6af5a6e37e52-kube-api-access-ltkjz\") pod \"ovn-northd-0\" (UID: \"a9b8ae61-599b-4f97-84a8-6af5a6e37e52\") " pod="openstack/ovn-northd-0" Feb 02 17:07:33 crc kubenswrapper[4835]: I0202 17:07:33.051580 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 02 17:07:33 crc kubenswrapper[4835]: I0202 17:07:33.249728 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:33 crc kubenswrapper[4835]: I0202 17:07:33.249783 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:33 crc kubenswrapper[4835]: I0202 17:07:33.483542 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 02 17:07:33 crc kubenswrapper[4835]: I0202 17:07:33.575934 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 02 17:07:34 crc kubenswrapper[4835]: I0202 17:07:34.446936 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a9b8ae61-599b-4f97-84a8-6af5a6e37e52","Type":"ContainerStarted","Data":"0ec4d4a09201884c97199c37700e9c496128c881a19390bbdd39e379ce4b6d0e"} Feb 02 17:07:35 crc kubenswrapper[4835]: I0202 17:07:35.276752 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 02 17:07:35 crc kubenswrapper[4835]: I0202 17:07:35.479852 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:35 crc kubenswrapper[4835]: I0202 17:07:35.732949 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:07:35 crc kubenswrapper[4835]: I0202 17:07:35.782531 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xdwcl"] Feb 02 17:07:36 crc kubenswrapper[4835]: I0202 17:07:36.065148 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:36 crc kubenswrapper[4835]: I0202 17:07:36.170761 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/openstack-cell1-galera-0" Feb 02 17:07:36 crc kubenswrapper[4835]: I0202 17:07:36.464198 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a9b8ae61-599b-4f97-84a8-6af5a6e37e52","Type":"ContainerStarted","Data":"c62494856ccdca166fecb6b1c308db8cbaec6c7d6b936db875c300e42feda0e5"} Feb 02 17:07:36 crc kubenswrapper[4835]: I0202 17:07:36.464301 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a9b8ae61-599b-4f97-84a8-6af5a6e37e52","Type":"ContainerStarted","Data":"d362fc7d11185bce1b7c1f2dd8674a4f42c1a75658afd384580f810f611f8574"} Feb 02 17:07:36 crc kubenswrapper[4835]: I0202 17:07:36.464564 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Feb 02 17:07:36 crc kubenswrapper[4835]: I0202 17:07:36.464771 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" podUID="f051b4af-ab78-4c61-915b-e007e40aa082" containerName="dnsmasq-dns" containerID="cri-o://2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c" gracePeriod=10 Feb 02 17:07:36 crc kubenswrapper[4835]: I0202 17:07:36.492604 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.41740828 podStartE2EDuration="4.492580433s" podCreationTimestamp="2026-02-02 17:07:32 +0000 UTC" firstStartedPulling="2026-02-02 17:07:33.489377459 +0000 UTC m=+1045.110981559" lastFinishedPulling="2026-02-02 17:07:35.564549632 +0000 UTC m=+1047.186153712" observedRunningTime="2026-02-02 17:07:36.48361669 +0000 UTC m=+1048.105220760" watchObservedRunningTime="2026-02-02 17:07:36.492580433 +0000 UTC m=+1048.114184513" Feb 02 17:07:36 crc kubenswrapper[4835]: I0202 17:07:36.907690 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.003574 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-config\") pod \"f051b4af-ab78-4c61-915b-e007e40aa082\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.003656 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-ovsdbserver-sb\") pod \"f051b4af-ab78-4c61-915b-e007e40aa082\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.003734 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvm6h\" (UniqueName: \"kubernetes.io/projected/f051b4af-ab78-4c61-915b-e007e40aa082-kube-api-access-xvm6h\") pod \"f051b4af-ab78-4c61-915b-e007e40aa082\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.003787 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-dns-svc\") pod \"f051b4af-ab78-4c61-915b-e007e40aa082\" (UID: \"f051b4af-ab78-4c61-915b-e007e40aa082\") " Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.009514 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f051b4af-ab78-4c61-915b-e007e40aa082-kube-api-access-xvm6h" (OuterVolumeSpecName: "kube-api-access-xvm6h") pod "f051b4af-ab78-4c61-915b-e007e40aa082" (UID: "f051b4af-ab78-4c61-915b-e007e40aa082"). InnerVolumeSpecName "kube-api-access-xvm6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.040461 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-config" (OuterVolumeSpecName: "config") pod "f051b4af-ab78-4c61-915b-e007e40aa082" (UID: "f051b4af-ab78-4c61-915b-e007e40aa082"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.043533 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f051b4af-ab78-4c61-915b-e007e40aa082" (UID: "f051b4af-ab78-4c61-915b-e007e40aa082"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.049942 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f051b4af-ab78-4c61-915b-e007e40aa082" (UID: "f051b4af-ab78-4c61-915b-e007e40aa082"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.106050 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.106089 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.106101 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f051b4af-ab78-4c61-915b-e007e40aa082-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.106114 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvm6h\" (UniqueName: \"kubernetes.io/projected/f051b4af-ab78-4c61-915b-e007e40aa082-kube-api-access-xvm6h\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.472514 4835 generic.go:334] "Generic (PLEG): container finished" podID="f051b4af-ab78-4c61-915b-e007e40aa082" containerID="2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c" exitCode=0 Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.472564 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.472582 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" event={"ID":"f051b4af-ab78-4c61-915b-e007e40aa082","Type":"ContainerDied","Data":"2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c"} Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.474052 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xdwcl" event={"ID":"f051b4af-ab78-4c61-915b-e007e40aa082","Type":"ContainerDied","Data":"fa37d87b2c72b5d23b62069a85ef5548c209189a2445235f05e303cc64364b69"} Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.474078 4835 scope.go:117] "RemoveContainer" containerID="2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.500068 4835 scope.go:117] "RemoveContainer" containerID="10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.503557 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xdwcl"] Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.510172 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xdwcl"] Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.518664 4835 scope.go:117] "RemoveContainer" containerID="2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c" Feb 02 17:07:37 crc kubenswrapper[4835]: E0202 17:07:37.519506 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c\": container with ID starting with 2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c not found: ID does not exist" containerID="2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c" Feb 02 17:07:37 crc kubenswrapper[4835]: 
I0202 17:07:37.519565 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c"} err="failed to get container status \"2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c\": rpc error: code = NotFound desc = could not find container \"2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c\": container with ID starting with 2e631090cff66c4af1622d3cf894e0b8c1860ead933283cb6e9c47945dc04e9c not found: ID does not exist" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.519595 4835 scope.go:117] "RemoveContainer" containerID="10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52" Feb 02 17:07:37 crc kubenswrapper[4835]: E0202 17:07:37.520057 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52\": container with ID starting with 10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52 not found: ID does not exist" containerID="10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.520101 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52"} err="failed to get container status \"10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52\": rpc error: code = NotFound desc = could not find container \"10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52\": container with ID starting with 10878eb4e6f1cab0acea9bd3c7fd6d4ed098330bcbdcdbc8a8e49ad546d14b52 not found: ID does not exist" Feb 02 17:07:37 crc kubenswrapper[4835]: I0202 17:07:37.925154 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 02 17:07:38 crc kubenswrapper[4835]: I0202 17:07:38.014957 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 02 17:07:39 crc kubenswrapper[4835]: I0202 17:07:39.199186 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f051b4af-ab78-4c61-915b-e007e40aa082" path="/var/lib/kubelet/pods/f051b4af-ab78-4c61-915b-e007e40aa082/volumes" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.593491 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-288zd"] Feb 02 17:07:40 crc kubenswrapper[4835]: E0202 17:07:40.594326 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f051b4af-ab78-4c61-915b-e007e40aa082" containerName="init" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.594345 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f051b4af-ab78-4c61-915b-e007e40aa082" containerName="init" Feb 02 17:07:40 crc kubenswrapper[4835]: E0202 17:07:40.594419 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f051b4af-ab78-4c61-915b-e007e40aa082" containerName="dnsmasq-dns" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.594431 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f051b4af-ab78-4c61-915b-e007e40aa082" containerName="dnsmasq-dns" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.594665 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f051b4af-ab78-4c61-915b-e007e40aa082" containerName="dnsmasq-dns" Feb 02 17:07:40 crc kubenswrapper[4835]: 
I0202 17:07:40.595379 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-288zd" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.597759 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.610013 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-288zd"] Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.775209 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-operator-scripts\") pod \"root-account-create-update-288zd\" (UID: \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\") " pod="openstack/root-account-create-update-288zd" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.775282 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktsbw\" (UniqueName: \"kubernetes.io/projected/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-kube-api-access-ktsbw\") pod \"root-account-create-update-288zd\" (UID: \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\") " pod="openstack/root-account-create-update-288zd" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.876751 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-operator-scripts\") pod \"root-account-create-update-288zd\" (UID: \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\") " pod="openstack/root-account-create-update-288zd" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.876813 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktsbw\" (UniqueName: \"kubernetes.io/projected/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-kube-api-access-ktsbw\") pod \"root-account-create-update-288zd\" (UID: \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\") " pod="openstack/root-account-create-update-288zd" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.877473 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-operator-scripts\") pod \"root-account-create-update-288zd\" (UID: \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\") " pod="openstack/root-account-create-update-288zd" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.895994 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktsbw\" (UniqueName: \"kubernetes.io/projected/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-kube-api-access-ktsbw\") pod \"root-account-create-update-288zd\" (UID: \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\") " pod="openstack/root-account-create-update-288zd" Feb 02 17:07:40 crc kubenswrapper[4835]: I0202 17:07:40.917881 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-288zd" Feb 02 17:07:41 crc kubenswrapper[4835]: I0202 17:07:41.382504 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-288zd"] Feb 02 17:07:41 crc kubenswrapper[4835]: W0202 17:07:41.385981 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0aee08af_96be_4e1a_8e1b_94cec0af8aa1.slice/crio-429d6a686c4f894c328a2e0e0d463fe452bcfc3290c1251a0759be960f59fde3 WatchSource:0}: Error finding container 429d6a686c4f894c328a2e0e0d463fe452bcfc3290c1251a0759be960f59fde3: Status 404 returned error can't find the container with id 429d6a686c4f894c328a2e0e0d463fe452bcfc3290c1251a0759be960f59fde3 Feb 02 17:07:41 crc kubenswrapper[4835]: I0202 17:07:41.507912 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-288zd" event={"ID":"0aee08af-96be-4e1a-8e1b-94cec0af8aa1","Type":"ContainerStarted","Data":"429d6a686c4f894c328a2e0e0d463fe452bcfc3290c1251a0759be960f59fde3"} Feb 02 17:07:42 crc kubenswrapper[4835]: I0202 17:07:42.516780 4835 generic.go:334] "Generic (PLEG): container finished" podID="0aee08af-96be-4e1a-8e1b-94cec0af8aa1" containerID="4efd4654e3ef3dd85035cf5585c8400033aabedc0f219ff97315630e8ab032ea" exitCode=0 Feb 02 17:07:42 crc kubenswrapper[4835]: I0202 17:07:42.516827 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-288zd" event={"ID":"0aee08af-96be-4e1a-8e1b-94cec0af8aa1","Type":"ContainerDied","Data":"4efd4654e3ef3dd85035cf5585c8400033aabedc0f219ff97315630e8ab032ea"} Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.156613 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-mswm4"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.158468 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.169962 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-mswm4"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.254268 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-174f-account-create-update-g7l2c"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.255333 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.258151 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.261959 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-174f-account-create-update-g7l2c"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.320218 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvkgs\" (UniqueName: \"kubernetes.io/projected/479d7565-e26e-40f3-8438-080b9a02d861-kube-api-access-kvkgs\") pod \"keystone-db-create-mswm4\" (UID: \"479d7565-e26e-40f3-8438-080b9a02d861\") " pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.320483 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479d7565-e26e-40f3-8438-080b9a02d861-operator-scripts\") pod \"keystone-db-create-mswm4\" (UID: \"479d7565-e26e-40f3-8438-080b9a02d861\") " pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.422182 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479d7565-e26e-40f3-8438-080b9a02d861-operator-scripts\") pod \"keystone-db-create-mswm4\" (UID: \"479d7565-e26e-40f3-8438-080b9a02d861\") " pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.422290 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bs2x\" (UniqueName: \"kubernetes.io/projected/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-kube-api-access-5bs2x\") pod \"keystone-174f-account-create-update-g7l2c\" (UID: \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\") " pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.422332 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvkgs\" (UniqueName: \"kubernetes.io/projected/479d7565-e26e-40f3-8438-080b9a02d861-kube-api-access-kvkgs\") pod \"keystone-db-create-mswm4\" (UID: \"479d7565-e26e-40f3-8438-080b9a02d861\") " pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.422372 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-operator-scripts\") pod \"keystone-174f-account-create-update-g7l2c\" (UID: \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\") " pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.423106 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479d7565-e26e-40f3-8438-080b9a02d861-operator-scripts\") pod \"keystone-db-create-mswm4\" (UID: \"479d7565-e26e-40f3-8438-080b9a02d861\") " pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.450732 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvkgs\" (UniqueName: \"kubernetes.io/projected/479d7565-e26e-40f3-8438-080b9a02d861-kube-api-access-kvkgs\") pod 
\"keystone-db-create-mswm4\" (UID: \"479d7565-e26e-40f3-8438-080b9a02d861\") " pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.460139 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-fcqd8"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.461225 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.466122 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-fcqd8"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.503528 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.527800 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bs2x\" (UniqueName: \"kubernetes.io/projected/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-kube-api-access-5bs2x\") pod \"keystone-174f-account-create-update-g7l2c\" (UID: \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\") " pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.527889 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-operator-scripts\") pod \"keystone-174f-account-create-update-g7l2c\" (UID: \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\") " pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.532190 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-operator-scripts\") pod \"keystone-174f-account-create-update-g7l2c\" (UID: \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\") " pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.549648 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bs2x\" (UniqueName: \"kubernetes.io/projected/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-kube-api-access-5bs2x\") pod \"keystone-174f-account-create-update-g7l2c\" (UID: \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\") " pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.552076 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-cce3-account-create-update-6846w"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.553170 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.559516 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.578701 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-cce3-account-create-update-6846w"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.582479 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.629998 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcvck\" (UniqueName: \"kubernetes.io/projected/2ff49264-c189-4ebe-88c5-35845a0a5157-kube-api-access-jcvck\") pod \"placement-db-create-fcqd8\" (UID: \"2ff49264-c189-4ebe-88c5-35845a0a5157\") " pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.630199 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ff49264-c189-4ebe-88c5-35845a0a5157-operator-scripts\") pod \"placement-db-create-fcqd8\" (UID: \"2ff49264-c189-4ebe-88c5-35845a0a5157\") " pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.719699 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-tpcsd"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.720642 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.731798 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwrl2\" (UniqueName: \"kubernetes.io/projected/a7b6b459-99e6-4462-a102-d29647f7b1fd-kube-api-access-gwrl2\") pod \"placement-cce3-account-create-update-6846w\" (UID: \"a7b6b459-99e6-4462-a102-d29647f7b1fd\") " pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.731891 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcvck\" (UniqueName: \"kubernetes.io/projected/2ff49264-c189-4ebe-88c5-35845a0a5157-kube-api-access-jcvck\") pod \"placement-db-create-fcqd8\" (UID: \"2ff49264-c189-4ebe-88c5-35845a0a5157\") " pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.731937 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7b6b459-99e6-4462-a102-d29647f7b1fd-operator-scripts\") pod \"placement-cce3-account-create-update-6846w\" (UID: \"a7b6b459-99e6-4462-a102-d29647f7b1fd\") " pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.731970 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ff49264-c189-4ebe-88c5-35845a0a5157-operator-scripts\") pod \"placement-db-create-fcqd8\" (UID: \"2ff49264-c189-4ebe-88c5-35845a0a5157\") " pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.732862 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ff49264-c189-4ebe-88c5-35845a0a5157-operator-scripts\") pod \"placement-db-create-fcqd8\" (UID: \"2ff49264-c189-4ebe-88c5-35845a0a5157\") " pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.735177 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-tpcsd"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.752342 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jcvck\" (UniqueName: \"kubernetes.io/projected/2ff49264-c189-4ebe-88c5-35845a0a5157-kube-api-access-jcvck\") pod \"placement-db-create-fcqd8\" (UID: \"2ff49264-c189-4ebe-88c5-35845a0a5157\") " pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.796858 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.833568 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-operator-scripts\") pod \"glance-db-create-tpcsd\" (UID: \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\") " pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.833672 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwrl2\" (UniqueName: \"kubernetes.io/projected/a7b6b459-99e6-4462-a102-d29647f7b1fd-kube-api-access-gwrl2\") pod \"placement-cce3-account-create-update-6846w\" (UID: \"a7b6b459-99e6-4462-a102-d29647f7b1fd\") " pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.833778 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7b6b459-99e6-4462-a102-d29647f7b1fd-operator-scripts\") pod \"placement-cce3-account-create-update-6846w\" (UID: \"a7b6b459-99e6-4462-a102-d29647f7b1fd\") " pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.833810 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj65r\" (UniqueName: \"kubernetes.io/projected/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-kube-api-access-zj65r\") pod \"glance-db-create-tpcsd\" (UID: \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\") " pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.835455 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7b6b459-99e6-4462-a102-d29647f7b1fd-operator-scripts\") pod \"placement-cce3-account-create-update-6846w\" (UID: \"a7b6b459-99e6-4462-a102-d29647f7b1fd\") " pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.851927 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-272e-account-create-update-j8f9f"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.852065 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-288zd" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.852383 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aee08af-96be-4e1a-8e1b-94cec0af8aa1" containerName="mariadb-account-create-update" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.852925 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.863367 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.867887 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwrl2\" (UniqueName: \"kubernetes.io/projected/a7b6b459-99e6-4462-a102-d29647f7b1fd-kube-api-access-gwrl2\") pod \"placement-cce3-account-create-update-6846w\" (UID: \"a7b6b459-99e6-4462-a102-d29647f7b1fd\") " pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.871130 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-272e-account-create-update-j8f9f"] Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.938249 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj65r\" (UniqueName: \"kubernetes.io/projected/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-kube-api-access-zj65r\") pod \"glance-db-create-tpcsd\" (UID: \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\") " pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.938382 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-operator-scripts\") pod \"glance-db-create-tpcsd\" (UID: \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\") " pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.939632 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-operator-scripts\") pod \"glance-db-create-tpcsd\" (UID: \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\") " pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.954424 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj65r\" (UniqueName: \"kubernetes.io/projected/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-kube-api-access-zj65r\") pod \"glance-db-create-tpcsd\" (UID: \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\") " pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:43 crc kubenswrapper[4835]: I0202 17:07:43.960988 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.024915 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-mswm4"] Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.039751 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-operator-scripts\") pod \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\" (UID: \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\") " Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.039807 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktsbw\" (UniqueName: \"kubernetes.io/projected/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-kube-api-access-ktsbw\") pod \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\" (UID: \"0aee08af-96be-4e1a-8e1b-94cec0af8aa1\") " Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.040196 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz8tn\" (UniqueName: \"kubernetes.io/projected/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-kube-api-access-fz8tn\") pod \"glance-272e-account-create-update-j8f9f\" (UID: \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\") " pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.040263 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-operator-scripts\") pod \"glance-272e-account-create-update-j8f9f\" (UID: \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\") " pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.040590 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0aee08af-96be-4e1a-8e1b-94cec0af8aa1" (UID: "0aee08af-96be-4e1a-8e1b-94cec0af8aa1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.043867 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-kube-api-access-ktsbw" (OuterVolumeSpecName: "kube-api-access-ktsbw") pod "0aee08af-96be-4e1a-8e1b-94cec0af8aa1" (UID: "0aee08af-96be-4e1a-8e1b-94cec0af8aa1"). InnerVolumeSpecName "kube-api-access-ktsbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.049217 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.142759 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz8tn\" (UniqueName: \"kubernetes.io/projected/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-kube-api-access-fz8tn\") pod \"glance-272e-account-create-update-j8f9f\" (UID: \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\") " pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.142818 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-operator-scripts\") pod \"glance-272e-account-create-update-j8f9f\" (UID: \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\") " pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.142938 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.142951 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktsbw\" (UniqueName: \"kubernetes.io/projected/0aee08af-96be-4e1a-8e1b-94cec0af8aa1-kube-api-access-ktsbw\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.143587 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-operator-scripts\") pod \"glance-272e-account-create-update-j8f9f\" (UID: \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\") " pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.153694 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-174f-account-create-update-g7l2c"] Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.175962 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz8tn\" (UniqueName: \"kubernetes.io/projected/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-kube-api-access-fz8tn\") pod \"glance-272e-account-create-update-j8f9f\" (UID: \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\") " pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.183874 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.195703 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-cce3-account-create-update-6846w"] Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.256901 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-fcqd8"] Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.494996 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-tpcsd"] Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.545200 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-174f-account-create-update-g7l2c" event={"ID":"3cfb23ae-a0ae-49be-94f9-b802f11a7b50","Type":"ContainerStarted","Data":"353e3c150183deee4372a9c1d58c724c99d5e40092c7c324785b26370da1c10c"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.545629 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-174f-account-create-update-g7l2c" event={"ID":"3cfb23ae-a0ae-49be-94f9-b802f11a7b50","Type":"ContainerStarted","Data":"775ff3e08783fe194785ca310a0e282be6110b98191582159a9fbafa94369b78"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.547497 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fcqd8" event={"ID":"2ff49264-c189-4ebe-88c5-35845a0a5157","Type":"ContainerStarted","Data":"570151b15315752bf955d300658119d5c2e2df3f9ebc1ee492d9654a2c75cf8e"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.547523 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fcqd8" event={"ID":"2ff49264-c189-4ebe-88c5-35845a0a5157","Type":"ContainerStarted","Data":"beff9fcc805334fdfd41807d3a4f61ab6c5a2180f1c7a53c08c5b2e987dbbce9"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.550842 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cce3-account-create-update-6846w" event={"ID":"a7b6b459-99e6-4462-a102-d29647f7b1fd","Type":"ContainerStarted","Data":"7735dbf567004982e6eb0e6938ada115b76a0a70fa29279d71a560a0975a9693"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.550891 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cce3-account-create-update-6846w" event={"ID":"a7b6b459-99e6-4462-a102-d29647f7b1fd","Type":"ContainerStarted","Data":"d07048b8d0e332d6547249cecd40305a8ac28dbd1571baf1cd5de4bb70d4986e"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.552907 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-288zd" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.553897 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-288zd" event={"ID":"0aee08af-96be-4e1a-8e1b-94cec0af8aa1","Type":"ContainerDied","Data":"429d6a686c4f894c328a2e0e0d463fe452bcfc3290c1251a0759be960f59fde3"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.553945 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="429d6a686c4f894c328a2e0e0d463fe452bcfc3290c1251a0759be960f59fde3" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.555375 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tpcsd" event={"ID":"ab0419cc-55bc-4d07-ab78-68d1fb8b639a","Type":"ContainerStarted","Data":"73ff6c0e5f337fb2f8c27ad6f22307a1eda8905d57ecb4cbbde90e691adb0210"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.557354 4835 generic.go:334] "Generic (PLEG): container finished" podID="479d7565-e26e-40f3-8438-080b9a02d861" containerID="d10a6d22158cb39069ba6ab33b9566ddb04e4f6ac9db1f1800cfb5d96b39aa6c" exitCode=0 Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.557406 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mswm4" event={"ID":"479d7565-e26e-40f3-8438-080b9a02d861","Type":"ContainerDied","Data":"d10a6d22158cb39069ba6ab33b9566ddb04e4f6ac9db1f1800cfb5d96b39aa6c"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.557432 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mswm4" event={"ID":"479d7565-e26e-40f3-8438-080b9a02d861","Type":"ContainerStarted","Data":"8ae01252fa6a742c33787ab553ca4e440b3f9e38af82ffa4355a541ddcc81112"} Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.562838 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-174f-account-create-update-g7l2c" podStartSLOduration=1.562822412 podStartE2EDuration="1.562822412s" podCreationTimestamp="2026-02-02 17:07:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:07:44.559982171 +0000 UTC m=+1056.181586271" watchObservedRunningTime="2026-02-02 17:07:44.562822412 +0000 UTC m=+1056.184426492" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.581986 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-fcqd8" podStartSLOduration=1.5819660519999998 podStartE2EDuration="1.581966052s" podCreationTimestamp="2026-02-02 17:07:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:07:44.573561115 +0000 UTC m=+1056.195165195" watchObservedRunningTime="2026-02-02 17:07:44.581966052 +0000 UTC m=+1056.203570132" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.631801 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-cce3-account-create-update-6846w" podStartSLOduration=1.63178284 podStartE2EDuration="1.63178284s" podCreationTimestamp="2026-02-02 17:07:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:07:44.606715382 +0000 UTC m=+1056.228319462" watchObservedRunningTime="2026-02-02 17:07:44.63178284 +0000 UTC m=+1056.253386920" Feb 02 17:07:44 crc 
kubenswrapper[4835]: I0202 17:07:44.634729 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-272e-account-create-update-j8f9f"] Feb 02 17:07:44 crc kubenswrapper[4835]: W0202 17:07:44.640476 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8aa5f59d_3ddb_4715_ba3d_f15e5503d34b.slice/crio-971323e33a9428430cedb697ae9694788e82f3384139164c242f3520b8ff313b WatchSource:0}: Error finding container 971323e33a9428430cedb697ae9694788e82f3384139164c242f3520b8ff313b: Status 404 returned error can't find the container with id 971323e33a9428430cedb697ae9694788e82f3384139164c242f3520b8ff313b Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.869910 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.870378 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.870534 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.871307 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ed5bb6b3343a006060ae2f0f9c428cf6f417413f7227d48031553b98961dab3a"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:07:44 crc kubenswrapper[4835]: I0202 17:07:44.871476 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://ed5bb6b3343a006060ae2f0f9c428cf6f417413f7227d48031553b98961dab3a" gracePeriod=600 Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.566926 4835 generic.go:334] "Generic (PLEG): container finished" podID="8aa5f59d-3ddb-4715-ba3d-f15e5503d34b" containerID="1219a044e414b7d97fea8d4b4f2fc4ba494dbf2bfffe5f90a5336768fcfd95a0" exitCode=0 Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.567114 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-272e-account-create-update-j8f9f" event={"ID":"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b","Type":"ContainerDied","Data":"1219a044e414b7d97fea8d4b4f2fc4ba494dbf2bfffe5f90a5336768fcfd95a0"} Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.567445 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-272e-account-create-update-j8f9f" event={"ID":"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b","Type":"ContainerStarted","Data":"971323e33a9428430cedb697ae9694788e82f3384139164c242f3520b8ff313b"} Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.569592 4835 generic.go:334] "Generic (PLEG): container finished" podID="ab0419cc-55bc-4d07-ab78-68d1fb8b639a" 
containerID="40616d7441c01bad5e0813a99af6d05f8fb4618cda6aebdaadbd2e7f3175f629" exitCode=0 Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.569653 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tpcsd" event={"ID":"ab0419cc-55bc-4d07-ab78-68d1fb8b639a","Type":"ContainerDied","Data":"40616d7441c01bad5e0813a99af6d05f8fb4618cda6aebdaadbd2e7f3175f629"} Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.576095 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="ed5bb6b3343a006060ae2f0f9c428cf6f417413f7227d48031553b98961dab3a" exitCode=0 Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.576161 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"ed5bb6b3343a006060ae2f0f9c428cf6f417413f7227d48031553b98961dab3a"} Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.576186 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"d1d745ca83c0b5216f384fd386fa76fd9b97cc7c8d5d53ff568a50a85b837b86"} Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.576203 4835 scope.go:117] "RemoveContainer" containerID="e554e578a75a247804791314d623e05f6091a40930f6f9c01d754a6a53db79cc" Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.588250 4835 generic.go:334] "Generic (PLEG): container finished" podID="3cfb23ae-a0ae-49be-94f9-b802f11a7b50" containerID="353e3c150183deee4372a9c1d58c724c99d5e40092c7c324785b26370da1c10c" exitCode=0 Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.588600 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-174f-account-create-update-g7l2c" event={"ID":"3cfb23ae-a0ae-49be-94f9-b802f11a7b50","Type":"ContainerDied","Data":"353e3c150183deee4372a9c1d58c724c99d5e40092c7c324785b26370da1c10c"} Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.597039 4835 generic.go:334] "Generic (PLEG): container finished" podID="2ff49264-c189-4ebe-88c5-35845a0a5157" containerID="570151b15315752bf955d300658119d5c2e2df3f9ebc1ee492d9654a2c75cf8e" exitCode=0 Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.597202 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fcqd8" event={"ID":"2ff49264-c189-4ebe-88c5-35845a0a5157","Type":"ContainerDied","Data":"570151b15315752bf955d300658119d5c2e2df3f9ebc1ee492d9654a2c75cf8e"} Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.602944 4835 generic.go:334] "Generic (PLEG): container finished" podID="a7b6b459-99e6-4462-a102-d29647f7b1fd" containerID="7735dbf567004982e6eb0e6938ada115b76a0a70fa29279d71a560a0975a9693" exitCode=0 Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.603232 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cce3-account-create-update-6846w" event={"ID":"a7b6b459-99e6-4462-a102-d29647f7b1fd","Type":"ContainerDied","Data":"7735dbf567004982e6eb0e6938ada115b76a0a70fa29279d71a560a0975a9693"} Feb 02 17:07:45 crc kubenswrapper[4835]: I0202 17:07:45.932471 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.071992 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvkgs\" (UniqueName: \"kubernetes.io/projected/479d7565-e26e-40f3-8438-080b9a02d861-kube-api-access-kvkgs\") pod \"479d7565-e26e-40f3-8438-080b9a02d861\" (UID: \"479d7565-e26e-40f3-8438-080b9a02d861\") " Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.072055 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479d7565-e26e-40f3-8438-080b9a02d861-operator-scripts\") pod \"479d7565-e26e-40f3-8438-080b9a02d861\" (UID: \"479d7565-e26e-40f3-8438-080b9a02d861\") " Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.073026 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/479d7565-e26e-40f3-8438-080b9a02d861-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "479d7565-e26e-40f3-8438-080b9a02d861" (UID: "479d7565-e26e-40f3-8438-080b9a02d861"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.078255 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/479d7565-e26e-40f3-8438-080b9a02d861-kube-api-access-kvkgs" (OuterVolumeSpecName: "kube-api-access-kvkgs") pod "479d7565-e26e-40f3-8438-080b9a02d861" (UID: "479d7565-e26e-40f3-8438-080b9a02d861"). InnerVolumeSpecName "kube-api-access-kvkgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.174163 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvkgs\" (UniqueName: \"kubernetes.io/projected/479d7565-e26e-40f3-8438-080b9a02d861-kube-api-access-kvkgs\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.174202 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479d7565-e26e-40f3-8438-080b9a02d861-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.615522 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-mswm4" Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.616498 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mswm4" event={"ID":"479d7565-e26e-40f3-8438-080b9a02d861","Type":"ContainerDied","Data":"8ae01252fa6a742c33787ab553ca4e440b3f9e38af82ffa4355a541ddcc81112"} Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.616540 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ae01252fa6a742c33787ab553ca4e440b3f9e38af82ffa4355a541ddcc81112" Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.944032 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-288zd"] Feb 02 17:07:46 crc kubenswrapper[4835]: I0202 17:07:46.958650 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-288zd"] Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.022648 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.193268 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ff49264-c189-4ebe-88c5-35845a0a5157-operator-scripts\") pod \"2ff49264-c189-4ebe-88c5-35845a0a5157\" (UID: \"2ff49264-c189-4ebe-88c5-35845a0a5157\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.193445 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcvck\" (UniqueName: \"kubernetes.io/projected/2ff49264-c189-4ebe-88c5-35845a0a5157-kube-api-access-jcvck\") pod \"2ff49264-c189-4ebe-88c5-35845a0a5157\" (UID: \"2ff49264-c189-4ebe-88c5-35845a0a5157\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.194400 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ff49264-c189-4ebe-88c5-35845a0a5157-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2ff49264-c189-4ebe-88c5-35845a0a5157" (UID: "2ff49264-c189-4ebe-88c5-35845a0a5157"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.198587 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ff49264-c189-4ebe-88c5-35845a0a5157-kube-api-access-jcvck" (OuterVolumeSpecName: "kube-api-access-jcvck") pod "2ff49264-c189-4ebe-88c5-35845a0a5157" (UID: "2ff49264-c189-4ebe-88c5-35845a0a5157"). InnerVolumeSpecName "kube-api-access-jcvck". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.204246 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aee08af-96be-4e1a-8e1b-94cec0af8aa1" path="/var/lib/kubelet/pods/0aee08af-96be-4e1a-8e1b-94cec0af8aa1/volumes" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.255358 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.262470 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.269786 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.282426 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.297423 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ff49264-c189-4ebe-88c5-35845a0a5157-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.297485 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcvck\" (UniqueName: \"kubernetes.io/projected/2ff49264-c189-4ebe-88c5-35845a0a5157-kube-api-access-jcvck\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.398407 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bs2x\" (UniqueName: \"kubernetes.io/projected/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-kube-api-access-5bs2x\") pod \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\" (UID: \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.398508 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-operator-scripts\") pod \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\" (UID: \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.398641 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz8tn\" (UniqueName: \"kubernetes.io/projected/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-kube-api-access-fz8tn\") pod \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\" (UID: \"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.398662 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-operator-scripts\") pod \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\" (UID: \"3cfb23ae-a0ae-49be-94f9-b802f11a7b50\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.398682 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7b6b459-99e6-4462-a102-d29647f7b1fd-operator-scripts\") pod \"a7b6b459-99e6-4462-a102-d29647f7b1fd\" (UID: \"a7b6b459-99e6-4462-a102-d29647f7b1fd\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.398705 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zj65r\" (UniqueName: \"kubernetes.io/projected/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-kube-api-access-zj65r\") pod \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\" (UID: \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.398744 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-operator-scripts\") pod \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\" (UID: \"ab0419cc-55bc-4d07-ab78-68d1fb8b639a\") " Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.398801 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwrl2\" (UniqueName: \"kubernetes.io/projected/a7b6b459-99e6-4462-a102-d29647f7b1fd-kube-api-access-gwrl2\") pod \"a7b6b459-99e6-4462-a102-d29647f7b1fd\" (UID: \"a7b6b459-99e6-4462-a102-d29647f7b1fd\") " Feb 02 
17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.399406 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8aa5f59d-3ddb-4715-ba3d-f15e5503d34b" (UID: "8aa5f59d-3ddb-4715-ba3d-f15e5503d34b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.399611 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ab0419cc-55bc-4d07-ab78-68d1fb8b639a" (UID: "ab0419cc-55bc-4d07-ab78-68d1fb8b639a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.399747 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3cfb23ae-a0ae-49be-94f9-b802f11a7b50" (UID: "3cfb23ae-a0ae-49be-94f9-b802f11a7b50"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.399930 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7b6b459-99e6-4462-a102-d29647f7b1fd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a7b6b459-99e6-4462-a102-d29647f7b1fd" (UID: "a7b6b459-99e6-4462-a102-d29647f7b1fd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.403102 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-kube-api-access-fz8tn" (OuterVolumeSpecName: "kube-api-access-fz8tn") pod "8aa5f59d-3ddb-4715-ba3d-f15e5503d34b" (UID: "8aa5f59d-3ddb-4715-ba3d-f15e5503d34b"). InnerVolumeSpecName "kube-api-access-fz8tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.403602 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7b6b459-99e6-4462-a102-d29647f7b1fd-kube-api-access-gwrl2" (OuterVolumeSpecName: "kube-api-access-gwrl2") pod "a7b6b459-99e6-4462-a102-d29647f7b1fd" (UID: "a7b6b459-99e6-4462-a102-d29647f7b1fd"). InnerVolumeSpecName "kube-api-access-gwrl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.403587 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-kube-api-access-5bs2x" (OuterVolumeSpecName: "kube-api-access-5bs2x") pod "3cfb23ae-a0ae-49be-94f9-b802f11a7b50" (UID: "3cfb23ae-a0ae-49be-94f9-b802f11a7b50"). InnerVolumeSpecName "kube-api-access-5bs2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.404527 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-kube-api-access-zj65r" (OuterVolumeSpecName: "kube-api-access-zj65r") pod "ab0419cc-55bc-4d07-ab78-68d1fb8b639a" (UID: "ab0419cc-55bc-4d07-ab78-68d1fb8b639a"). 
InnerVolumeSpecName "kube-api-access-zj65r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.500574 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwrl2\" (UniqueName: \"kubernetes.io/projected/a7b6b459-99e6-4462-a102-d29647f7b1fd-kube-api-access-gwrl2\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.500613 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bs2x\" (UniqueName: \"kubernetes.io/projected/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-kube-api-access-5bs2x\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.500627 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.500638 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz8tn\" (UniqueName: \"kubernetes.io/projected/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b-kube-api-access-fz8tn\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.500649 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cfb23ae-a0ae-49be-94f9-b802f11a7b50-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.500660 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7b6b459-99e6-4462-a102-d29647f7b1fd-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.500670 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zj65r\" (UniqueName: \"kubernetes.io/projected/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-kube-api-access-zj65r\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.500681 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab0419cc-55bc-4d07-ab78-68d1fb8b639a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.624716 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fcqd8" event={"ID":"2ff49264-c189-4ebe-88c5-35845a0a5157","Type":"ContainerDied","Data":"beff9fcc805334fdfd41807d3a4f61ab6c5a2180f1c7a53c08c5b2e987dbbce9"} Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.624803 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="beff9fcc805334fdfd41807d3a4f61ab6c5a2180f1c7a53c08c5b2e987dbbce9" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.624868 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fcqd8" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.632521 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-cce3-account-create-update-6846w" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.632534 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cce3-account-create-update-6846w" event={"ID":"a7b6b459-99e6-4462-a102-d29647f7b1fd","Type":"ContainerDied","Data":"d07048b8d0e332d6547249cecd40305a8ac28dbd1571baf1cd5de4bb70d4986e"} Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.632958 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d07048b8d0e332d6547249cecd40305a8ac28dbd1571baf1cd5de4bb70d4986e" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.636038 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-272e-account-create-update-j8f9f" event={"ID":"8aa5f59d-3ddb-4715-ba3d-f15e5503d34b","Type":"ContainerDied","Data":"971323e33a9428430cedb697ae9694788e82f3384139164c242f3520b8ff313b"} Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.636223 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="971323e33a9428430cedb697ae9694788e82f3384139164c242f3520b8ff313b" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.636421 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-272e-account-create-update-j8f9f" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.638773 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tpcsd" event={"ID":"ab0419cc-55bc-4d07-ab78-68d1fb8b639a","Type":"ContainerDied","Data":"73ff6c0e5f337fb2f8c27ad6f22307a1eda8905d57ecb4cbbde90e691adb0210"} Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.639693 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73ff6c0e5f337fb2f8c27ad6f22307a1eda8905d57ecb4cbbde90e691adb0210" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.638786 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tpcsd" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.642726 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-174f-account-create-update-g7l2c" event={"ID":"3cfb23ae-a0ae-49be-94f9-b802f11a7b50","Type":"ContainerDied","Data":"775ff3e08783fe194785ca310a0e282be6110b98191582159a9fbafa94369b78"} Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.642775 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="775ff3e08783fe194785ca310a0e282be6110b98191582159a9fbafa94369b78" Feb 02 17:07:47 crc kubenswrapper[4835]: I0202 17:07:47.642842 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-174f-account-create-update-g7l2c" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994064 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-fnpz7"] Feb 02 17:07:48 crc kubenswrapper[4835]: E0202 17:07:48.994603 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cfb23ae-a0ae-49be-94f9-b802f11a7b50" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994622 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cfb23ae-a0ae-49be-94f9-b802f11a7b50" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: E0202 17:07:48.994644 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="479d7565-e26e-40f3-8438-080b9a02d861" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994652 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="479d7565-e26e-40f3-8438-080b9a02d861" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: E0202 17:07:48.994669 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff49264-c189-4ebe-88c5-35845a0a5157" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994676 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff49264-c189-4ebe-88c5-35845a0a5157" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: E0202 17:07:48.994696 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0419cc-55bc-4d07-ab78-68d1fb8b639a" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994704 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0419cc-55bc-4d07-ab78-68d1fb8b639a" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: E0202 17:07:48.994720 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aa5f59d-3ddb-4715-ba3d-f15e5503d34b" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994729 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aa5f59d-3ddb-4715-ba3d-f15e5503d34b" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: E0202 17:07:48.994741 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aee08af-96be-4e1a-8e1b-94cec0af8aa1" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994749 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aee08af-96be-4e1a-8e1b-94cec0af8aa1" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: E0202 17:07:48.994779 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7b6b459-99e6-4462-a102-d29647f7b1fd" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994787 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7b6b459-99e6-4462-a102-d29647f7b1fd" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994953 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cfb23ae-a0ae-49be-94f9-b802f11a7b50" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994966 4835 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a7b6b459-99e6-4462-a102-d29647f7b1fd" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994983 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="479d7565-e26e-40f3-8438-080b9a02d861" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.994993 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aa5f59d-3ddb-4715-ba3d-f15e5503d34b" containerName="mariadb-account-create-update" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.995002 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab0419cc-55bc-4d07-ab78-68d1fb8b639a" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.995013 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ff49264-c189-4ebe-88c5-35845a0a5157" containerName="mariadb-database-create" Feb 02 17:07:48 crc kubenswrapper[4835]: I0202 17:07:48.995694 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.001965 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-pfsqd" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.003183 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.008130 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-fnpz7"] Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.128119 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-db-sync-config-data\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.128175 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlvzp\" (UniqueName: \"kubernetes.io/projected/db8f91d5-436a-43f3-b131-5594fb4904cb-kube-api-access-vlvzp\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.128249 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-config-data\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.128377 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-combined-ca-bundle\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.229217 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-db-sync-config-data\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " 
pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.229284 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlvzp\" (UniqueName: \"kubernetes.io/projected/db8f91d5-436a-43f3-b131-5594fb4904cb-kube-api-access-vlvzp\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.229313 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-config-data\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.229383 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-combined-ca-bundle\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.232647 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-combined-ca-bundle\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.233190 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-config-data\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.233806 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-db-sync-config-data\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.253039 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlvzp\" (UniqueName: \"kubernetes.io/projected/db8f91d5-436a-43f3-b131-5594fb4904cb-kube-api-access-vlvzp\") pod \"glance-db-sync-fnpz7\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.309605 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-fnpz7" Feb 02 17:07:49 crc kubenswrapper[4835]: I0202 17:07:49.840984 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-fnpz7"] Feb 02 17:07:50 crc kubenswrapper[4835]: I0202 17:07:50.671003 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fnpz7" event={"ID":"db8f91d5-436a-43f3-b131-5594fb4904cb","Type":"ContainerStarted","Data":"28d9a4ae9bfd635c5640ef7824a924e323a0c42e9c6582ac165d1e47bec083e4"} Feb 02 17:07:51 crc kubenswrapper[4835]: I0202 17:07:51.943621 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-zgxpq"] Feb 02 17:07:51 crc kubenswrapper[4835]: I0202 17:07:51.944775 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zgxpq" Feb 02 17:07:51 crc kubenswrapper[4835]: I0202 17:07:51.947029 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 02 17:07:51 crc kubenswrapper[4835]: I0202 17:07:51.954666 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-zgxpq"] Feb 02 17:07:52 crc kubenswrapper[4835]: I0202 17:07:52.086799 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-operator-scripts\") pod \"root-account-create-update-zgxpq\" (UID: \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\") " pod="openstack/root-account-create-update-zgxpq" Feb 02 17:07:52 crc kubenswrapper[4835]: I0202 17:07:52.087122 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmrq4\" (UniqueName: \"kubernetes.io/projected/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-kube-api-access-dmrq4\") pod \"root-account-create-update-zgxpq\" (UID: \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\") " pod="openstack/root-account-create-update-zgxpq" Feb 02 17:07:52 crc kubenswrapper[4835]: I0202 17:07:52.188881 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-operator-scripts\") pod \"root-account-create-update-zgxpq\" (UID: \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\") " pod="openstack/root-account-create-update-zgxpq" Feb 02 17:07:52 crc kubenswrapper[4835]: I0202 17:07:52.188964 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmrq4\" (UniqueName: \"kubernetes.io/projected/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-kube-api-access-dmrq4\") pod \"root-account-create-update-zgxpq\" (UID: \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\") " pod="openstack/root-account-create-update-zgxpq" Feb 02 17:07:52 crc kubenswrapper[4835]: I0202 17:07:52.190105 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-operator-scripts\") pod \"root-account-create-update-zgxpq\" (UID: \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\") " pod="openstack/root-account-create-update-zgxpq" Feb 02 17:07:52 crc kubenswrapper[4835]: I0202 17:07:52.212012 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmrq4\" (UniqueName: \"kubernetes.io/projected/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-kube-api-access-dmrq4\") pod 
\"root-account-create-update-zgxpq\" (UID: \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\") " pod="openstack/root-account-create-update-zgxpq" Feb 02 17:07:52 crc kubenswrapper[4835]: I0202 17:07:52.263008 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zgxpq" Feb 02 17:07:52 crc kubenswrapper[4835]: W0202 17:07:52.728115 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd200c16c_b8ab_4f2f_bc1e_56b0a1888c42.slice/crio-237b7c8e912a98bd3ace1ba0151a8f46b26d439a14fff77684bb68b8ddb95acf WatchSource:0}: Error finding container 237b7c8e912a98bd3ace1ba0151a8f46b26d439a14fff77684bb68b8ddb95acf: Status 404 returned error can't find the container with id 237b7c8e912a98bd3ace1ba0151a8f46b26d439a14fff77684bb68b8ddb95acf Feb 02 17:07:52 crc kubenswrapper[4835]: I0202 17:07:52.737864 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-zgxpq"] Feb 02 17:07:53 crc kubenswrapper[4835]: I0202 17:07:53.132699 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 02 17:07:53 crc kubenswrapper[4835]: I0202 17:07:53.709375 4835 generic.go:334] "Generic (PLEG): container finished" podID="d200c16c-b8ab-4f2f-bc1e-56b0a1888c42" containerID="e62c5246d92814a41dd7dbe79d140c7b49283ada25eb31ffb8c267dc78a676b9" exitCode=0 Feb 02 17:07:53 crc kubenswrapper[4835]: I0202 17:07:53.709428 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zgxpq" event={"ID":"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42","Type":"ContainerDied","Data":"e62c5246d92814a41dd7dbe79d140c7b49283ada25eb31ffb8c267dc78a676b9"} Feb 02 17:07:53 crc kubenswrapper[4835]: I0202 17:07:53.709460 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zgxpq" event={"ID":"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42","Type":"ContainerStarted","Data":"237b7c8e912a98bd3ace1ba0151a8f46b26d439a14fff77684bb68b8ddb95acf"} Feb 02 17:07:57 crc kubenswrapper[4835]: I0202 17:07:57.747348 4835 generic.go:334] "Generic (PLEG): container finished" podID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerID="d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7" exitCode=0 Feb 02 17:07:57 crc kubenswrapper[4835]: I0202 17:07:57.747401 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ddf6049-a0d8-429c-b8ce-b52702f4ee60","Type":"ContainerDied","Data":"d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7"} Feb 02 17:07:57 crc kubenswrapper[4835]: I0202 17:07:57.751061 4835 generic.go:334] "Generic (PLEG): container finished" podID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerID="62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4" exitCode=0 Feb 02 17:07:57 crc kubenswrapper[4835]: I0202 17:07:57.751114 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5cb4e8f7-3881-4fef-9056-0e2f149aab21","Type":"ContainerDied","Data":"62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4"} Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.298663 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hxh6p" podUID="e3608c64-7b50-4a57-a0ea-578164629872" containerName="ovn-controller" probeResult="failure" output=< Feb 02 17:07:59 crc kubenswrapper[4835]: ERROR - ovn-controller connection 
status is 'not connected', expecting 'connected' status Feb 02 17:07:59 crc kubenswrapper[4835]: > Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.324455 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.331365 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-248s6" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.542817 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hxh6p-config-6dtl8"] Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.544302 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.546781 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.558037 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxh6p-config-6dtl8"] Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.629217 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.629266 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run-ovn\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.629319 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfczp\" (UniqueName: \"kubernetes.io/projected/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-kube-api-access-hfczp\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.629510 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-scripts\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.629572 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-additional-scripts\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.629623 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-log-ovn\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: 
\"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.730873 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.730941 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run-ovn\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.730989 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfczp\" (UniqueName: \"kubernetes.io/projected/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-kube-api-access-hfczp\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.731042 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-scripts\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.731073 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-additional-scripts\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.731101 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-log-ovn\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.731511 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-log-ovn\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.732325 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run-ovn\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.732641 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-additional-scripts\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: 
\"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.731862 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.737463 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-scripts\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.749449 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfczp\" (UniqueName: \"kubernetes.io/projected/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-kube-api-access-hfczp\") pod \"ovn-controller-hxh6p-config-6dtl8\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:07:59 crc kubenswrapper[4835]: I0202 17:07:59.861848 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.435942 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zgxpq" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.557442 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-operator-scripts\") pod \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\" (UID: \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\") " Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.558001 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmrq4\" (UniqueName: \"kubernetes.io/projected/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-kube-api-access-dmrq4\") pod \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\" (UID: \"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42\") " Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.558949 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d200c16c-b8ab-4f2f-bc1e-56b0a1888c42" (UID: "d200c16c-b8ab-4f2f-bc1e-56b0a1888c42"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.570580 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-kube-api-access-dmrq4" (OuterVolumeSpecName: "kube-api-access-dmrq4") pod "d200c16c-b8ab-4f2f-bc1e-56b0a1888c42" (UID: "d200c16c-b8ab-4f2f-bc1e-56b0a1888c42"). InnerVolumeSpecName "kube-api-access-dmrq4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.660198 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmrq4\" (UniqueName: \"kubernetes.io/projected/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-kube-api-access-dmrq4\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.660225 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.788125 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ddf6049-a0d8-429c-b8ce-b52702f4ee60","Type":"ContainerStarted","Data":"17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c"} Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.788348 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.792428 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zgxpq" event={"ID":"d200c16c-b8ab-4f2f-bc1e-56b0a1888c42","Type":"ContainerDied","Data":"237b7c8e912a98bd3ace1ba0151a8f46b26d439a14fff77684bb68b8ddb95acf"} Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.792479 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="237b7c8e912a98bd3ace1ba0151a8f46b26d439a14fff77684bb68b8ddb95acf" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.792640 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zgxpq" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.806336 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5cb4e8f7-3881-4fef-9056-0e2f149aab21","Type":"ContainerStarted","Data":"c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec"} Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.806678 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.818477 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=53.687485595 podStartE2EDuration="1m2.818455299s" podCreationTimestamp="2026-02-02 17:06:59 +0000 UTC" firstStartedPulling="2026-02-02 17:07:10.970169321 +0000 UTC m=+1022.591773401" lastFinishedPulling="2026-02-02 17:07:20.101139025 +0000 UTC m=+1031.722743105" observedRunningTime="2026-02-02 17:08:01.814212889 +0000 UTC m=+1073.435816969" watchObservedRunningTime="2026-02-02 17:08:01.818455299 +0000 UTC m=+1073.440059379" Feb 02 17:08:01 crc kubenswrapper[4835]: I0202 17:08:01.842340 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=53.714501048 podStartE2EDuration="1m2.842323513s" podCreationTimestamp="2026-02-02 17:06:59 +0000 UTC" firstStartedPulling="2026-02-02 17:07:10.9747342 +0000 UTC m=+1022.596338280" lastFinishedPulling="2026-02-02 17:07:20.102556665 +0000 UTC m=+1031.724160745" observedRunningTime="2026-02-02 17:08:01.838202677 +0000 UTC m=+1073.459806777" watchObservedRunningTime="2026-02-02 17:08:01.842323513 +0000 UTC m=+1073.463927593" Feb 02 17:08:01 crc 
kubenswrapper[4835]: I0202 17:08:01.876847 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxh6p-config-6dtl8"] Feb 02 17:08:01 crc kubenswrapper[4835]: W0202 17:08:01.882393 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d5d2fcd_647a_42d7_a3a1_238f41ffc398.slice/crio-7a087766458c2551d821a4392ca79c6fcc992d2a66476ca03706c5e7959993d2 WatchSource:0}: Error finding container 7a087766458c2551d821a4392ca79c6fcc992d2a66476ca03706c5e7959993d2: Status 404 returned error can't find the container with id 7a087766458c2551d821a4392ca79c6fcc992d2a66476ca03706c5e7959993d2 Feb 02 17:08:02 crc kubenswrapper[4835]: I0202 17:08:02.813735 4835 generic.go:334] "Generic (PLEG): container finished" podID="9d5d2fcd-647a-42d7-a3a1-238f41ffc398" containerID="ce0cdf63120ce260b9d13b5cf20294ae008be1b808035648165fe5131fcd188c" exitCode=0 Feb 02 17:08:02 crc kubenswrapper[4835]: I0202 17:08:02.813790 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxh6p-config-6dtl8" event={"ID":"9d5d2fcd-647a-42d7-a3a1-238f41ffc398","Type":"ContainerDied","Data":"ce0cdf63120ce260b9d13b5cf20294ae008be1b808035648165fe5131fcd188c"} Feb 02 17:08:02 crc kubenswrapper[4835]: I0202 17:08:02.815073 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxh6p-config-6dtl8" event={"ID":"9d5d2fcd-647a-42d7-a3a1-238f41ffc398","Type":"ContainerStarted","Data":"7a087766458c2551d821a4392ca79c6fcc992d2a66476ca03706c5e7959993d2"} Feb 02 17:08:02 crc kubenswrapper[4835]: I0202 17:08:02.816534 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fnpz7" event={"ID":"db8f91d5-436a-43f3-b131-5594fb4904cb","Type":"ContainerStarted","Data":"fc29c2ee0efc4e60410c6d6c24afb1b90af415f94a587b79c6b1e48555b9c2df"} Feb 02 17:08:02 crc kubenswrapper[4835]: I0202 17:08:02.853160 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-fnpz7" podStartSLOduration=3.2656785360000002 podStartE2EDuration="14.853145242s" podCreationTimestamp="2026-02-02 17:07:48 +0000 UTC" firstStartedPulling="2026-02-02 17:07:49.850583864 +0000 UTC m=+1061.472187944" lastFinishedPulling="2026-02-02 17:08:01.43805057 +0000 UTC m=+1073.059654650" observedRunningTime="2026-02-02 17:08:02.847320718 +0000 UTC m=+1074.468924808" watchObservedRunningTime="2026-02-02 17:08:02.853145242 +0000 UTC m=+1074.474749322" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.126911 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.232897 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfczp\" (UniqueName: \"kubernetes.io/projected/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-kube-api-access-hfczp\") pod \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.233207 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-additional-scripts\") pod \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.233324 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-log-ovn\") pod \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.233351 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run-ovn\") pod \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.233403 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run\") pod \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.233458 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-scripts\") pod \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\" (UID: \"9d5d2fcd-647a-42d7-a3a1-238f41ffc398\") " Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.234089 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9d5d2fcd-647a-42d7-a3a1-238f41ffc398" (UID: "9d5d2fcd-647a-42d7-a3a1-238f41ffc398"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.234304 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9d5d2fcd-647a-42d7-a3a1-238f41ffc398" (UID: "9d5d2fcd-647a-42d7-a3a1-238f41ffc398"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.234335 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run" (OuterVolumeSpecName: "var-run") pod "9d5d2fcd-647a-42d7-a3a1-238f41ffc398" (UID: "9d5d2fcd-647a-42d7-a3a1-238f41ffc398"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.234774 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9d5d2fcd-647a-42d7-a3a1-238f41ffc398" (UID: "9d5d2fcd-647a-42d7-a3a1-238f41ffc398"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.234934 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-scripts" (OuterVolumeSpecName: "scripts") pod "9d5d2fcd-647a-42d7-a3a1-238f41ffc398" (UID: "9d5d2fcd-647a-42d7-a3a1-238f41ffc398"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.259670 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-kube-api-access-hfczp" (OuterVolumeSpecName: "kube-api-access-hfczp") pod "9d5d2fcd-647a-42d7-a3a1-238f41ffc398" (UID: "9d5d2fcd-647a-42d7-a3a1-238f41ffc398"). InnerVolumeSpecName "kube-api-access-hfczp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.299560 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-hxh6p" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.335296 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.335333 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfczp\" (UniqueName: \"kubernetes.io/projected/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-kube-api-access-hfczp\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.335344 4835 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.335352 4835 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.335362 4835 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.335376 4835 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d5d2fcd-647a-42d7-a3a1-238f41ffc398-var-run\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.842148 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxh6p-config-6dtl8" event={"ID":"9d5d2fcd-647a-42d7-a3a1-238f41ffc398","Type":"ContainerDied","Data":"7a087766458c2551d821a4392ca79c6fcc992d2a66476ca03706c5e7959993d2"} Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.842465 4835 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a087766458c2551d821a4392ca79c6fcc992d2a66476ca03706c5e7959993d2" Feb 02 17:08:04 crc kubenswrapper[4835]: I0202 17:08:04.842342 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxh6p-config-6dtl8" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.223018 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hxh6p-config-6dtl8"] Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.234845 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hxh6p-config-6dtl8"] Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.334022 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hxh6p-config-jjhmv"] Feb 02 17:08:05 crc kubenswrapper[4835]: E0202 17:08:05.334412 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d200c16c-b8ab-4f2f-bc1e-56b0a1888c42" containerName="mariadb-account-create-update" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.334431 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d200c16c-b8ab-4f2f-bc1e-56b0a1888c42" containerName="mariadb-account-create-update" Feb 02 17:08:05 crc kubenswrapper[4835]: E0202 17:08:05.334453 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d5d2fcd-647a-42d7-a3a1-238f41ffc398" containerName="ovn-config" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.334462 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d5d2fcd-647a-42d7-a3a1-238f41ffc398" containerName="ovn-config" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.334636 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="d200c16c-b8ab-4f2f-bc1e-56b0a1888c42" containerName="mariadb-account-create-update" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.334674 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d5d2fcd-647a-42d7-a3a1-238f41ffc398" containerName="ovn-config" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.335227 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.337426 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.352571 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxh6p-config-jjhmv"] Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.376711 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.376763 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-additional-scripts\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.376792 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run-ovn\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.376811 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mkkk\" (UniqueName: \"kubernetes.io/projected/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-kube-api-access-2mkkk\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.376886 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-log-ovn\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.377025 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-scripts\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.478561 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run-ovn\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.478612 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mkkk\" (UniqueName: 
\"kubernetes.io/projected/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-kube-api-access-2mkkk\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.478634 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-log-ovn\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.478691 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-scripts\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.478776 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.478810 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-additional-scripts\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.478945 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run-ovn\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.479238 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-log-ovn\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.479556 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-additional-scripts\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.479626 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.481829 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-scripts\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.496031 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mkkk\" (UniqueName: \"kubernetes.io/projected/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-kube-api-access-2mkkk\") pod \"ovn-controller-hxh6p-config-jjhmv\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.652654 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:05 crc kubenswrapper[4835]: I0202 17:08:05.926954 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxh6p-config-jjhmv"] Feb 02 17:08:05 crc kubenswrapper[4835]: W0202 17:08:05.930938 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded96e78f_c59b_4ff0_b863_8b7b600eb5c9.slice/crio-e9b61a73f7323a6dc9c00ef950202fbc048b836cdd42d8adc04f354115ee7bef WatchSource:0}: Error finding container e9b61a73f7323a6dc9c00ef950202fbc048b836cdd42d8adc04f354115ee7bef: Status 404 returned error can't find the container with id e9b61a73f7323a6dc9c00ef950202fbc048b836cdd42d8adc04f354115ee7bef Feb 02 17:08:06 crc kubenswrapper[4835]: I0202 17:08:06.858637 4835 generic.go:334] "Generic (PLEG): container finished" podID="ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" containerID="edf347c315fbdc6e55783e0ae6fd91c66747996d27717a753f2ddc82c1af51a6" exitCode=0 Feb 02 17:08:06 crc kubenswrapper[4835]: I0202 17:08:06.859384 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxh6p-config-jjhmv" event={"ID":"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9","Type":"ContainerDied","Data":"edf347c315fbdc6e55783e0ae6fd91c66747996d27717a753f2ddc82c1af51a6"} Feb 02 17:08:06 crc kubenswrapper[4835]: I0202 17:08:06.859434 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxh6p-config-jjhmv" event={"ID":"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9","Type":"ContainerStarted","Data":"e9b61a73f7323a6dc9c00ef950202fbc048b836cdd42d8adc04f354115ee7bef"} Feb 02 17:08:07 crc kubenswrapper[4835]: I0202 17:08:07.197235 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d5d2fcd-647a-42d7-a3a1-238f41ffc398" path="/var/lib/kubelet/pods/9d5d2fcd-647a-42d7-a3a1-238f41ffc398/volumes" Feb 02 17:08:07 crc kubenswrapper[4835]: I0202 17:08:07.866584 4835 generic.go:334] "Generic (PLEG): container finished" podID="db8f91d5-436a-43f3-b131-5594fb4904cb" containerID="fc29c2ee0efc4e60410c6d6c24afb1b90af415f94a587b79c6b1e48555b9c2df" exitCode=0 Feb 02 17:08:07 crc kubenswrapper[4835]: I0202 17:08:07.866672 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fnpz7" event={"ID":"db8f91d5-436a-43f3-b131-5594fb4904cb","Type":"ContainerDied","Data":"fc29c2ee0efc4e60410c6d6c24afb1b90af415f94a587b79c6b1e48555b9c2df"} Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.145555 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.217691 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run-ovn\") pod \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.217790 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-scripts\") pod \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.217851 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mkkk\" (UniqueName: \"kubernetes.io/projected/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-kube-api-access-2mkkk\") pod \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.217856 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" (UID: "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.217905 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run\") pod \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.217997 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-log-ovn\") pod \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.218076 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run" (OuterVolumeSpecName: "var-run") pod "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" (UID: "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.218067 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-additional-scripts\") pod \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\" (UID: \"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9\") " Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.218157 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" (UID: "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.218859 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" (UID: "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.219071 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-scripts" (OuterVolumeSpecName: "scripts") pod "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" (UID: "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.218711 4835 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.219236 4835 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.219246 4835 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.226513 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-kube-api-access-2mkkk" (OuterVolumeSpecName: "kube-api-access-2mkkk") pod "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" (UID: "ed96e78f-c59b-4ff0-b863-8b7b600eb5c9"). InnerVolumeSpecName "kube-api-access-2mkkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.323424 4835 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.323477 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.323492 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mkkk\" (UniqueName: \"kubernetes.io/projected/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9-kube-api-access-2mkkk\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.875787 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hxh6p-config-jjhmv" Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.875844 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxh6p-config-jjhmv" event={"ID":"ed96e78f-c59b-4ff0-b863-8b7b600eb5c9","Type":"ContainerDied","Data":"e9b61a73f7323a6dc9c00ef950202fbc048b836cdd42d8adc04f354115ee7bef"} Feb 02 17:08:08 crc kubenswrapper[4835]: I0202 17:08:08.875917 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9b61a73f7323a6dc9c00ef950202fbc048b836cdd42d8adc04f354115ee7bef" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.241946 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hxh6p-config-jjhmv"] Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.249448 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hxh6p-config-jjhmv"] Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.286154 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-fnpz7" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.338416 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlvzp\" (UniqueName: \"kubernetes.io/projected/db8f91d5-436a-43f3-b131-5594fb4904cb-kube-api-access-vlvzp\") pod \"db8f91d5-436a-43f3-b131-5594fb4904cb\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.338622 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-config-data\") pod \"db8f91d5-436a-43f3-b131-5594fb4904cb\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.338688 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-combined-ca-bundle\") pod \"db8f91d5-436a-43f3-b131-5594fb4904cb\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.338752 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-db-sync-config-data\") pod \"db8f91d5-436a-43f3-b131-5594fb4904cb\" (UID: \"db8f91d5-436a-43f3-b131-5594fb4904cb\") " Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.345441 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "db8f91d5-436a-43f3-b131-5594fb4904cb" (UID: "db8f91d5-436a-43f3-b131-5594fb4904cb"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.347592 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db8f91d5-436a-43f3-b131-5594fb4904cb-kube-api-access-vlvzp" (OuterVolumeSpecName: "kube-api-access-vlvzp") pod "db8f91d5-436a-43f3-b131-5594fb4904cb" (UID: "db8f91d5-436a-43f3-b131-5594fb4904cb"). InnerVolumeSpecName "kube-api-access-vlvzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.359799 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db8f91d5-436a-43f3-b131-5594fb4904cb" (UID: "db8f91d5-436a-43f3-b131-5594fb4904cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.378437 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-config-data" (OuterVolumeSpecName: "config-data") pod "db8f91d5-436a-43f3-b131-5594fb4904cb" (UID: "db8f91d5-436a-43f3-b131-5594fb4904cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.441250 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.449787 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.449805 4835 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/db8f91d5-436a-43f3-b131-5594fb4904cb-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.449815 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlvzp\" (UniqueName: \"kubernetes.io/projected/db8f91d5-436a-43f3-b131-5594fb4904cb-kube-api-access-vlvzp\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.884069 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fnpz7" event={"ID":"db8f91d5-436a-43f3-b131-5594fb4904cb","Type":"ContainerDied","Data":"28d9a4ae9bfd635c5640ef7824a924e323a0c42e9c6582ac165d1e47bec083e4"} Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.884621 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28d9a4ae9bfd635c5640ef7824a924e323a0c42e9c6582ac165d1e47bec083e4" Feb 02 17:08:09 crc kubenswrapper[4835]: I0202 17:08:09.884130 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-fnpz7" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.364221 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-v2pgc"] Feb 02 17:08:10 crc kubenswrapper[4835]: E0202 17:08:10.364623 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" containerName="ovn-config" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.364645 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" containerName="ovn-config" Feb 02 17:08:10 crc kubenswrapper[4835]: E0202 17:08:10.364663 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db8f91d5-436a-43f3-b131-5594fb4904cb" containerName="glance-db-sync" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.364671 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="db8f91d5-436a-43f3-b131-5594fb4904cb" containerName="glance-db-sync" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.364858 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="db8f91d5-436a-43f3-b131-5594fb4904cb" containerName="glance-db-sync" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.364892 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" containerName="ovn-config" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.365873 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.390044 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-v2pgc"] Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.465704 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.466026 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c6rz\" (UniqueName: \"kubernetes.io/projected/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-kube-api-access-9c6rz\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.466121 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.466224 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-dns-svc\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.466365 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-config\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.567394 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-config\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.568136 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-config\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.568725 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.568220 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.568828 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c6rz\" (UniqueName: \"kubernetes.io/projected/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-kube-api-access-9c6rz\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.568848 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.569147 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-dns-svc\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.569728 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-dns-svc\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.569947 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-nb\") pod 
\"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.587710 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c6rz\" (UniqueName: \"kubernetes.io/projected/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-kube-api-access-9c6rz\") pod \"dnsmasq-dns-554567b4f7-v2pgc\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:10 crc kubenswrapper[4835]: I0202 17:08:10.686803 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:11 crc kubenswrapper[4835]: I0202 17:08:11.137067 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-v2pgc"] Feb 02 17:08:11 crc kubenswrapper[4835]: W0202 17:08:11.145701 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b773f9f_674f_4e4a_98bb_dda10bd0ff9a.slice/crio-bc80aadcff0ec6141a51f4471828fc1c10335c7fc94eb1876f506170fde048f8 WatchSource:0}: Error finding container bc80aadcff0ec6141a51f4471828fc1c10335c7fc94eb1876f506170fde048f8: Status 404 returned error can't find the container with id bc80aadcff0ec6141a51f4471828fc1c10335c7fc94eb1876f506170fde048f8 Feb 02 17:08:11 crc kubenswrapper[4835]: I0202 17:08:11.201904 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed96e78f-c59b-4ff0-b863-8b7b600eb5c9" path="/var/lib/kubelet/pods/ed96e78f-c59b-4ff0-b863-8b7b600eb5c9/volumes" Feb 02 17:08:11 crc kubenswrapper[4835]: I0202 17:08:11.902179 4835 generic.go:334] "Generic (PLEG): container finished" podID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerID="6c16aed4a64a992ab33d2c590b447adb0d3f54d0bcb058b817cc426b475263f8" exitCode=0 Feb 02 17:08:11 crc kubenswrapper[4835]: I0202 17:08:11.902327 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" event={"ID":"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a","Type":"ContainerDied","Data":"6c16aed4a64a992ab33d2c590b447adb0d3f54d0bcb058b817cc426b475263f8"} Feb 02 17:08:11 crc kubenswrapper[4835]: I0202 17:08:11.902527 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" event={"ID":"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a","Type":"ContainerStarted","Data":"bc80aadcff0ec6141a51f4471828fc1c10335c7fc94eb1876f506170fde048f8"} Feb 02 17:08:12 crc kubenswrapper[4835]: I0202 17:08:12.913670 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" event={"ID":"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a","Type":"ContainerStarted","Data":"1c27ae4a784f19d6cdb906c903704d2078d41d83ef772405698725ded1fcfeda"} Feb 02 17:08:12 crc kubenswrapper[4835]: I0202 17:08:12.913892 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:12 crc kubenswrapper[4835]: I0202 17:08:12.938587 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" podStartSLOduration=2.938569969 podStartE2EDuration="2.938569969s" podCreationTimestamp="2026-02-02 17:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:08:12.932167998 +0000 UTC m=+1084.553772098" 
watchObservedRunningTime="2026-02-02 17:08:12.938569969 +0000 UTC m=+1084.560174049" Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.364435 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.688448 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.715477 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.758851 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-smzl5"] Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.759090 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-smzl5" podUID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" containerName="dnsmasq-dns" containerID="cri-o://b7065bb9bb259c8dd1bbf1cdd83e2744622006063b844fcaf37ccc773b58122d" gracePeriod=10 Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.837849 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-vmwqm"] Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.838978 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.859214 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-vmwqm"] Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.968157 4835 generic.go:334] "Generic (PLEG): container finished" podID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" containerID="b7065bb9bb259c8dd1bbf1cdd83e2744622006063b844fcaf37ccc773b58122d" exitCode=0 Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.968204 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-smzl5" event={"ID":"0f09b478-2bc6-4e0c-958f-7cab0354d7d4","Type":"ContainerDied","Data":"b7065bb9bb259c8dd1bbf1cdd83e2744622006063b844fcaf37ccc773b58122d"} Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.968958 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f983fbe-e05f-4c6a-8759-0d6578726175-operator-scripts\") pod \"cinder-db-create-vmwqm\" (UID: \"3f983fbe-e05f-4c6a-8759-0d6578726175\") " pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:20 crc kubenswrapper[4835]: I0202 17:08:20.969001 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmmn8\" (UniqueName: \"kubernetes.io/projected/3f983fbe-e05f-4c6a-8759-0d6578726175-kube-api-access-tmmn8\") pod \"cinder-db-create-vmwqm\" (UID: \"3f983fbe-e05f-4c6a-8759-0d6578726175\") " pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.010812 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-5705-account-create-update-mwnmq"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.011929 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.014243 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.032149 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-rgx9x"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.033268 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.042186 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5705-account-create-update-mwnmq"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.055235 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-rgx9x"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.070377 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f983fbe-e05f-4c6a-8759-0d6578726175-operator-scripts\") pod \"cinder-db-create-vmwqm\" (UID: \"3f983fbe-e05f-4c6a-8759-0d6578726175\") " pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.070493 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmmn8\" (UniqueName: \"kubernetes.io/projected/3f983fbe-e05f-4c6a-8759-0d6578726175-kube-api-access-tmmn8\") pod \"cinder-db-create-vmwqm\" (UID: \"3f983fbe-e05f-4c6a-8759-0d6578726175\") " pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.071708 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f983fbe-e05f-4c6a-8759-0d6578726175-operator-scripts\") pod \"cinder-db-create-vmwqm\" (UID: \"3f983fbe-e05f-4c6a-8759-0d6578726175\") " pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.103868 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-krtrd"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.104772 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.111005 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.111228 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.111515 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cnmxn" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.111664 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.111767 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-h9ppw"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.112680 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.126946 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-krtrd"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.134412 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmmn8\" (UniqueName: \"kubernetes.io/projected/3f983fbe-e05f-4c6a-8759-0d6578726175-kube-api-access-tmmn8\") pod \"cinder-db-create-vmwqm\" (UID: \"3f983fbe-e05f-4c6a-8759-0d6578726175\") " pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.135501 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-h9ppw"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.162603 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.172166 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpn7x\" (UniqueName: \"kubernetes.io/projected/2b184729-91fb-4ebc-8ef1-b81a2aebc754-kube-api-access-rpn7x\") pod \"barbican-5705-account-create-update-mwnmq\" (UID: \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\") " pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.172219 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4kgs\" (UniqueName: \"kubernetes.io/projected/fa7c0b48-4d54-47f6-b862-42c3caeedb80-kube-api-access-l4kgs\") pod \"barbican-db-create-rgx9x\" (UID: \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\") " pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.172246 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa7c0b48-4d54-47f6-b862-42c3caeedb80-operator-scripts\") pod \"barbican-db-create-rgx9x\" (UID: \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\") " pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.172339 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b184729-91fb-4ebc-8ef1-b81a2aebc754-operator-scripts\") pod \"barbican-5705-account-create-update-mwnmq\" (UID: \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\") " pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.238933 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-3d3b-account-create-update-62lns"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.241824 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.244811 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.254015 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3d3b-account-create-update-62lns"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273579 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpn7x\" (UniqueName: \"kubernetes.io/projected/2b184729-91fb-4ebc-8ef1-b81a2aebc754-kube-api-access-rpn7x\") pod \"barbican-5705-account-create-update-mwnmq\" (UID: \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\") " pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273616 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4kgs\" (UniqueName: \"kubernetes.io/projected/fa7c0b48-4d54-47f6-b862-42c3caeedb80-kube-api-access-l4kgs\") pod \"barbican-db-create-rgx9x\" (UID: \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\") " pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273644 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa7c0b48-4d54-47f6-b862-42c3caeedb80-operator-scripts\") pod \"barbican-db-create-rgx9x\" (UID: \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\") " pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273715 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-combined-ca-bundle\") pod \"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273748 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0b11f58-4665-4f50-83ec-48cfa18b3499-operator-scripts\") pod \"neutron-db-create-h9ppw\" (UID: \"a0b11f58-4665-4f50-83ec-48cfa18b3499\") " pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273763 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-config-data\") pod \"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273843 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b184729-91fb-4ebc-8ef1-b81a2aebc754-operator-scripts\") pod \"barbican-5705-account-create-update-mwnmq\" (UID: \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\") " pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273919 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkfnz\" (UniqueName: \"kubernetes.io/projected/360af2ab-a220-45ad-94cf-87415175d269-kube-api-access-dkfnz\") pod 
\"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.273938 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pggw8\" (UniqueName: \"kubernetes.io/projected/a0b11f58-4665-4f50-83ec-48cfa18b3499-kube-api-access-pggw8\") pod \"neutron-db-create-h9ppw\" (UID: \"a0b11f58-4665-4f50-83ec-48cfa18b3499\") " pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.274583 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa7c0b48-4d54-47f6-b862-42c3caeedb80-operator-scripts\") pod \"barbican-db-create-rgx9x\" (UID: \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\") " pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.275561 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b184729-91fb-4ebc-8ef1-b81a2aebc754-operator-scripts\") pod \"barbican-5705-account-create-update-mwnmq\" (UID: \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\") " pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.300829 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4kgs\" (UniqueName: \"kubernetes.io/projected/fa7c0b48-4d54-47f6-b862-42c3caeedb80-kube-api-access-l4kgs\") pod \"barbican-db-create-rgx9x\" (UID: \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\") " pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.322350 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpn7x\" (UniqueName: \"kubernetes.io/projected/2b184729-91fb-4ebc-8ef1-b81a2aebc754-kube-api-access-rpn7x\") pod \"barbican-5705-account-create-update-mwnmq\" (UID: \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\") " pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.330451 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.347908 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-03dc-account-create-update-sn2k6"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.350839 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.351192 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.353411 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.362114 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-03dc-account-create-update-sn2k6"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.375141 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-combined-ca-bundle\") pod \"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.375196 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0b11f58-4665-4f50-83ec-48cfa18b3499-operator-scripts\") pod \"neutron-db-create-h9ppw\" (UID: \"a0b11f58-4665-4f50-83ec-48cfa18b3499\") " pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.375219 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-config-data\") pod \"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.375263 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2xff\" (UniqueName: \"kubernetes.io/projected/628af1b6-f4bb-4235-a14e-6a72d3f40830-kube-api-access-f2xff\") pod \"cinder-3d3b-account-create-update-62lns\" (UID: \"628af1b6-f4bb-4235-a14e-6a72d3f40830\") " pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.375348 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkfnz\" (UniqueName: \"kubernetes.io/projected/360af2ab-a220-45ad-94cf-87415175d269-kube-api-access-dkfnz\") pod \"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.375375 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pggw8\" (UniqueName: \"kubernetes.io/projected/a0b11f58-4665-4f50-83ec-48cfa18b3499-kube-api-access-pggw8\") pod \"neutron-db-create-h9ppw\" (UID: \"a0b11f58-4665-4f50-83ec-48cfa18b3499\") " pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.375408 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/628af1b6-f4bb-4235-a14e-6a72d3f40830-operator-scripts\") pod \"cinder-3d3b-account-create-update-62lns\" (UID: \"628af1b6-f4bb-4235-a14e-6a72d3f40830\") " pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.378043 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0b11f58-4665-4f50-83ec-48cfa18b3499-operator-scripts\") pod \"neutron-db-create-h9ppw\" (UID: \"a0b11f58-4665-4f50-83ec-48cfa18b3499\") " 
pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.381380 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-combined-ca-bundle\") pod \"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.392458 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-config-data\") pod \"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.400825 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkfnz\" (UniqueName: \"kubernetes.io/projected/360af2ab-a220-45ad-94cf-87415175d269-kube-api-access-dkfnz\") pod \"keystone-db-sync-krtrd\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.405224 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pggw8\" (UniqueName: \"kubernetes.io/projected/a0b11f58-4665-4f50-83ec-48cfa18b3499-kube-api-access-pggw8\") pod \"neutron-db-create-h9ppw\" (UID: \"a0b11f58-4665-4f50-83ec-48cfa18b3499\") " pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.434767 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.476865 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2xff\" (UniqueName: \"kubernetes.io/projected/628af1b6-f4bb-4235-a14e-6a72d3f40830-kube-api-access-f2xff\") pod \"cinder-3d3b-account-create-update-62lns\" (UID: \"628af1b6-f4bb-4235-a14e-6a72d3f40830\") " pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.483530 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bm7q\" (UniqueName: \"kubernetes.io/projected/879465eb-d451-428b-8f21-d4f47afe9ada-kube-api-access-8bm7q\") pod \"neutron-03dc-account-create-update-sn2k6\" (UID: \"879465eb-d451-428b-8f21-d4f47afe9ada\") " pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.483595 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/628af1b6-f4bb-4235-a14e-6a72d3f40830-operator-scripts\") pod \"cinder-3d3b-account-create-update-62lns\" (UID: \"628af1b6-f4bb-4235-a14e-6a72d3f40830\") " pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.483645 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/879465eb-d451-428b-8f21-d4f47afe9ada-operator-scripts\") pod \"neutron-03dc-account-create-update-sn2k6\" (UID: \"879465eb-d451-428b-8f21-d4f47afe9ada\") " pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.484599 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/628af1b6-f4bb-4235-a14e-6a72d3f40830-operator-scripts\") pod \"cinder-3d3b-account-create-update-62lns\" (UID: \"628af1b6-f4bb-4235-a14e-6a72d3f40830\") " pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.516776 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2xff\" (UniqueName: \"kubernetes.io/projected/628af1b6-f4bb-4235-a14e-6a72d3f40830-kube-api-access-f2xff\") pod \"cinder-3d3b-account-create-update-62lns\" (UID: \"628af1b6-f4bb-4235-a14e-6a72d3f40830\") " pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.552888 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.568795 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.585360 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/879465eb-d451-428b-8f21-d4f47afe9ada-operator-scripts\") pod \"neutron-03dc-account-create-update-sn2k6\" (UID: \"879465eb-d451-428b-8f21-d4f47afe9ada\") " pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.585553 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bm7q\" (UniqueName: \"kubernetes.io/projected/879465eb-d451-428b-8f21-d4f47afe9ada-kube-api-access-8bm7q\") pod \"neutron-03dc-account-create-update-sn2k6\" (UID: \"879465eb-d451-428b-8f21-d4f47afe9ada\") " pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.586030 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/879465eb-d451-428b-8f21-d4f47afe9ada-operator-scripts\") pod \"neutron-03dc-account-create-update-sn2k6\" (UID: \"879465eb-d451-428b-8f21-d4f47afe9ada\") " pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.607126 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bm7q\" (UniqueName: \"kubernetes.io/projected/879465eb-d451-428b-8f21-d4f47afe9ada-kube-api-access-8bm7q\") pod \"neutron-03dc-account-create-update-sn2k6\" (UID: \"879465eb-d451-428b-8f21-d4f47afe9ada\") " pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.696990 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-vmwqm"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.701610 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:21 crc kubenswrapper[4835]: W0202 17:08:21.734113 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f983fbe_e05f_4c6a_8759_0d6578726175.slice/crio-d65cbe51cf542ade8786f1acc5944041e2755fd2731d9cc3c58ccad2089c453e WatchSource:0}: Error finding container d65cbe51cf542ade8786f1acc5944041e2755fd2731d9cc3c58ccad2089c453e: Status 404 returned error can't find the container with id d65cbe51cf542ade8786f1acc5944041e2755fd2731d9cc3c58ccad2089c453e Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.816819 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.974576 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-rgx9x"] Feb 02 17:08:21 crc kubenswrapper[4835]: W0202 17:08:21.974928 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa7c0b48_4d54_47f6_b862_42c3caeedb80.slice/crio-bd7a1a0f601054aaf144aa080e712be269be7bd77b66a20c71bf222b4a801bd1 WatchSource:0}: Error finding container bd7a1a0f601054aaf144aa080e712be269be7bd77b66a20c71bf222b4a801bd1: Status 404 returned error can't find the container with id bd7a1a0f601054aaf144aa080e712be269be7bd77b66a20c71bf222b4a801bd1 Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.993945 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-smzl5" event={"ID":"0f09b478-2bc6-4e0c-958f-7cab0354d7d4","Type":"ContainerDied","Data":"c7bc7424aea84cc93d98101d400551c30b50e61ed97ed53939a0f4fd0717233b"} Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.994011 4835 scope.go:117] "RemoveContainer" containerID="b7065bb9bb259c8dd1bbf1cdd83e2744622006063b844fcaf37ccc773b58122d" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.994170 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-smzl5" Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.994821 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5705-account-create-update-mwnmq"] Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.997339 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-sb\") pod \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.997411 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vmwqm" event={"ID":"3f983fbe-e05f-4c6a-8759-0d6578726175","Type":"ContainerStarted","Data":"d65cbe51cf542ade8786f1acc5944041e2755fd2731d9cc3c58ccad2089c453e"} Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.997420 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zl7r\" (UniqueName: \"kubernetes.io/projected/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-kube-api-access-5zl7r\") pod \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.997521 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-dns-svc\") pod \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.997598 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-config\") pod \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " Feb 02 17:08:21 crc kubenswrapper[4835]: I0202 17:08:21.997636 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-nb\") pod \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\" (UID: \"0f09b478-2bc6-4e0c-958f-7cab0354d7d4\") " Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.017949 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-kube-api-access-5zl7r" (OuterVolumeSpecName: "kube-api-access-5zl7r") pod "0f09b478-2bc6-4e0c-958f-7cab0354d7d4" (UID: "0f09b478-2bc6-4e0c-958f-7cab0354d7d4"). InnerVolumeSpecName "kube-api-access-5zl7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.024694 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-krtrd"] Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.058350 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0f09b478-2bc6-4e0c-958f-7cab0354d7d4" (UID: "0f09b478-2bc6-4e0c-958f-7cab0354d7d4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.078730 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0f09b478-2bc6-4e0c-958f-7cab0354d7d4" (UID: "0f09b478-2bc6-4e0c-958f-7cab0354d7d4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.083002 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-config" (OuterVolumeSpecName: "config") pod "0f09b478-2bc6-4e0c-958f-7cab0354d7d4" (UID: "0f09b478-2bc6-4e0c-958f-7cab0354d7d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.100304 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.100332 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.100342 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.100352 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zl7r\" (UniqueName: \"kubernetes.io/projected/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-kube-api-access-5zl7r\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.105217 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0f09b478-2bc6-4e0c-958f-7cab0354d7d4" (UID: "0f09b478-2bc6-4e0c-958f-7cab0354d7d4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.160991 4835 scope.go:117] "RemoveContainer" containerID="0857dc59876a83aa4ce94f8bee5c02f6f740dfa1cb8c49b3120500b4db3ede6a" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.202327 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f09b478-2bc6-4e0c-958f-7cab0354d7d4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.252592 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3d3b-account-create-update-62lns"] Feb 02 17:08:22 crc kubenswrapper[4835]: W0202 17:08:22.261628 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod628af1b6_f4bb_4235_a14e_6a72d3f40830.slice/crio-71e62087d002b9bdc7ebcddfb32eec3d41cf29d9a691ab0a1dd96c83ae21b4d0 WatchSource:0}: Error finding container 71e62087d002b9bdc7ebcddfb32eec3d41cf29d9a691ab0a1dd96c83ae21b4d0: Status 404 returned error can't find the container with id 71e62087d002b9bdc7ebcddfb32eec3d41cf29d9a691ab0a1dd96c83ae21b4d0 Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.291880 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-h9ppw"] Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.340023 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-smzl5"] Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.353440 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-smzl5"] Feb 02 17:08:22 crc kubenswrapper[4835]: I0202 17:08:22.403913 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-03dc-account-create-update-sn2k6"] Feb 02 17:08:22 crc kubenswrapper[4835]: W0202 17:08:22.430635 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod879465eb_d451_428b_8f21_d4f47afe9ada.slice/crio-666ca3d4ae8b162168df3b68187dbdb37f70a6bbe4b2488e7a18ab3fdca0623c WatchSource:0}: Error finding container 666ca3d4ae8b162168df3b68187dbdb37f70a6bbe4b2488e7a18ab3fdca0623c: Status 404 returned error can't find the container with id 666ca3d4ae8b162168df3b68187dbdb37f70a6bbe4b2488e7a18ab3fdca0623c Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.041940 4835 generic.go:334] "Generic (PLEG): container finished" podID="2b184729-91fb-4ebc-8ef1-b81a2aebc754" containerID="b83db10bd2169b0573e8cadb9407af9455a2182698097be42f06879c8e3ecba7" exitCode=0 Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.042289 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5705-account-create-update-mwnmq" event={"ID":"2b184729-91fb-4ebc-8ef1-b81a2aebc754","Type":"ContainerDied","Data":"b83db10bd2169b0573e8cadb9407af9455a2182698097be42f06879c8e3ecba7"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.042325 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5705-account-create-update-mwnmq" event={"ID":"2b184729-91fb-4ebc-8ef1-b81a2aebc754","Type":"ContainerStarted","Data":"53b65fcae2addec28f84483c7b58ce5b5bac74ef8978205087b3543b4649d3a7"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.048393 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-03dc-account-create-update-sn2k6" 
event={"ID":"879465eb-d451-428b-8f21-d4f47afe9ada","Type":"ContainerStarted","Data":"a20068a59cd5fffb027c45bb7ebbe3177ad9eb0bf1f20f1e0d4f9c81daaeaf02"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.048461 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-03dc-account-create-update-sn2k6" event={"ID":"879465eb-d451-428b-8f21-d4f47afe9ada","Type":"ContainerStarted","Data":"666ca3d4ae8b162168df3b68187dbdb37f70a6bbe4b2488e7a18ab3fdca0623c"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.055354 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-krtrd" event={"ID":"360af2ab-a220-45ad-94cf-87415175d269","Type":"ContainerStarted","Data":"c9249e012567bbe8664c9179b699a459d6c562005f8fe9e18aae0e0e94823a54"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.056692 4835 generic.go:334] "Generic (PLEG): container finished" podID="a0b11f58-4665-4f50-83ec-48cfa18b3499" containerID="1ae3daae2474db250f31442d1e93bc0da7047890afe782281e64ce5eeb0a2ca4" exitCode=0 Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.056747 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h9ppw" event={"ID":"a0b11f58-4665-4f50-83ec-48cfa18b3499","Type":"ContainerDied","Data":"1ae3daae2474db250f31442d1e93bc0da7047890afe782281e64ce5eeb0a2ca4"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.056769 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h9ppw" event={"ID":"a0b11f58-4665-4f50-83ec-48cfa18b3499","Type":"ContainerStarted","Data":"521da892306b0fda40ce0af75a15f8bdfccc7940f0aee9f54b3539cee83317cc"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.070571 4835 generic.go:334] "Generic (PLEG): container finished" podID="fa7c0b48-4d54-47f6-b862-42c3caeedb80" containerID="421cb20ec708a4903c8a9c761652d6b6ca91e86ab8153782a608d5dd4973fdf6" exitCode=0 Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.070678 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-rgx9x" event={"ID":"fa7c0b48-4d54-47f6-b862-42c3caeedb80","Type":"ContainerDied","Data":"421cb20ec708a4903c8a9c761652d6b6ca91e86ab8153782a608d5dd4973fdf6"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.070708 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-rgx9x" event={"ID":"fa7c0b48-4d54-47f6-b862-42c3caeedb80","Type":"ContainerStarted","Data":"bd7a1a0f601054aaf144aa080e712be269be7bd77b66a20c71bf222b4a801bd1"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.103230 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-03dc-account-create-update-sn2k6" podStartSLOduration=2.103209965 podStartE2EDuration="2.103209965s" podCreationTimestamp="2026-02-02 17:08:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:08:23.099238643 +0000 UTC m=+1094.720842723" watchObservedRunningTime="2026-02-02 17:08:23.103209965 +0000 UTC m=+1094.724814045" Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.106496 4835 generic.go:334] "Generic (PLEG): container finished" podID="3f983fbe-e05f-4c6a-8759-0d6578726175" containerID="6c2502d5dfb6d2e0a339e33ccb89f4fa7bcd4887d1335c82fdf1399ea07a370f" exitCode=0 Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.106575 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vmwqm" 
event={"ID":"3f983fbe-e05f-4c6a-8759-0d6578726175","Type":"ContainerDied","Data":"6c2502d5dfb6d2e0a339e33ccb89f4fa7bcd4887d1335c82fdf1399ea07a370f"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.134472 4835 generic.go:334] "Generic (PLEG): container finished" podID="628af1b6-f4bb-4235-a14e-6a72d3f40830" containerID="7057d6711b021ccf6b0f56839384edc2db6d7aea71b87961dfba1e54e94f5a0b" exitCode=0 Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.134519 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3d3b-account-create-update-62lns" event={"ID":"628af1b6-f4bb-4235-a14e-6a72d3f40830","Type":"ContainerDied","Data":"7057d6711b021ccf6b0f56839384edc2db6d7aea71b87961dfba1e54e94f5a0b"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.134545 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3d3b-account-create-update-62lns" event={"ID":"628af1b6-f4bb-4235-a14e-6a72d3f40830","Type":"ContainerStarted","Data":"71e62087d002b9bdc7ebcddfb32eec3d41cf29d9a691ab0a1dd96c83ae21b4d0"} Feb 02 17:08:23 crc kubenswrapper[4835]: I0202 17:08:23.210058 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" path="/var/lib/kubelet/pods/0f09b478-2bc6-4e0c-958f-7cab0354d7d4/volumes" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.147970 4835 generic.go:334] "Generic (PLEG): container finished" podID="879465eb-d451-428b-8f21-d4f47afe9ada" containerID="a20068a59cd5fffb027c45bb7ebbe3177ad9eb0bf1f20f1e0d4f9c81daaeaf02" exitCode=0 Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.148069 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-03dc-account-create-update-sn2k6" event={"ID":"879465eb-d451-428b-8f21-d4f47afe9ada","Type":"ContainerDied","Data":"a20068a59cd5fffb027c45bb7ebbe3177ad9eb0bf1f20f1e0d4f9c81daaeaf02"} Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.551340 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.683098 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4kgs\" (UniqueName: \"kubernetes.io/projected/fa7c0b48-4d54-47f6-b862-42c3caeedb80-kube-api-access-l4kgs\") pod \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\" (UID: \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.683193 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa7c0b48-4d54-47f6-b862-42c3caeedb80-operator-scripts\") pod \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\" (UID: \"fa7c0b48-4d54-47f6-b862-42c3caeedb80\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.686686 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa7c0b48-4d54-47f6-b862-42c3caeedb80-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fa7c0b48-4d54-47f6-b862-42c3caeedb80" (UID: "fa7c0b48-4d54-47f6-b862-42c3caeedb80"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.708032 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa7c0b48-4d54-47f6-b862-42c3caeedb80-kube-api-access-l4kgs" (OuterVolumeSpecName: "kube-api-access-l4kgs") pod "fa7c0b48-4d54-47f6-b862-42c3caeedb80" (UID: "fa7c0b48-4d54-47f6-b862-42c3caeedb80"). InnerVolumeSpecName "kube-api-access-l4kgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.774258 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.784642 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4kgs\" (UniqueName: \"kubernetes.io/projected/fa7c0b48-4d54-47f6-b862-42c3caeedb80-kube-api-access-l4kgs\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.784670 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa7c0b48-4d54-47f6-b862-42c3caeedb80-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.791033 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.800581 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.886968 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2xff\" (UniqueName: \"kubernetes.io/projected/628af1b6-f4bb-4235-a14e-6a72d3f40830-kube-api-access-f2xff\") pod \"628af1b6-f4bb-4235-a14e-6a72d3f40830\" (UID: \"628af1b6-f4bb-4235-a14e-6a72d3f40830\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887022 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/628af1b6-f4bb-4235-a14e-6a72d3f40830-operator-scripts\") pod \"628af1b6-f4bb-4235-a14e-6a72d3f40830\" (UID: \"628af1b6-f4bb-4235-a14e-6a72d3f40830\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887097 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pggw8\" (UniqueName: \"kubernetes.io/projected/a0b11f58-4665-4f50-83ec-48cfa18b3499-kube-api-access-pggw8\") pod \"a0b11f58-4665-4f50-83ec-48cfa18b3499\" (UID: \"a0b11f58-4665-4f50-83ec-48cfa18b3499\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887184 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmmn8\" (UniqueName: \"kubernetes.io/projected/3f983fbe-e05f-4c6a-8759-0d6578726175-kube-api-access-tmmn8\") pod \"3f983fbe-e05f-4c6a-8759-0d6578726175\" (UID: \"3f983fbe-e05f-4c6a-8759-0d6578726175\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887224 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0b11f58-4665-4f50-83ec-48cfa18b3499-operator-scripts\") pod \"a0b11f58-4665-4f50-83ec-48cfa18b3499\" (UID: \"a0b11f58-4665-4f50-83ec-48cfa18b3499\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887307 4835 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f983fbe-e05f-4c6a-8759-0d6578726175-operator-scripts\") pod \"3f983fbe-e05f-4c6a-8759-0d6578726175\" (UID: \"3f983fbe-e05f-4c6a-8759-0d6578726175\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887477 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/628af1b6-f4bb-4235-a14e-6a72d3f40830-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "628af1b6-f4bb-4235-a14e-6a72d3f40830" (UID: "628af1b6-f4bb-4235-a14e-6a72d3f40830"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887656 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/628af1b6-f4bb-4235-a14e-6a72d3f40830-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887962 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f983fbe-e05f-4c6a-8759-0d6578726175-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3f983fbe-e05f-4c6a-8759-0d6578726175" (UID: "3f983fbe-e05f-4c6a-8759-0d6578726175"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.887988 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0b11f58-4665-4f50-83ec-48cfa18b3499-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a0b11f58-4665-4f50-83ec-48cfa18b3499" (UID: "a0b11f58-4665-4f50-83ec-48cfa18b3499"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.890660 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0b11f58-4665-4f50-83ec-48cfa18b3499-kube-api-access-pggw8" (OuterVolumeSpecName: "kube-api-access-pggw8") pod "a0b11f58-4665-4f50-83ec-48cfa18b3499" (UID: "a0b11f58-4665-4f50-83ec-48cfa18b3499"). InnerVolumeSpecName "kube-api-access-pggw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.891043 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f983fbe-e05f-4c6a-8759-0d6578726175-kube-api-access-tmmn8" (OuterVolumeSpecName: "kube-api-access-tmmn8") pod "3f983fbe-e05f-4c6a-8759-0d6578726175" (UID: "3f983fbe-e05f-4c6a-8759-0d6578726175"). InnerVolumeSpecName "kube-api-access-tmmn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.891228 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/628af1b6-f4bb-4235-a14e-6a72d3f40830-kube-api-access-f2xff" (OuterVolumeSpecName: "kube-api-access-f2xff") pod "628af1b6-f4bb-4235-a14e-6a72d3f40830" (UID: "628af1b6-f4bb-4235-a14e-6a72d3f40830"). InnerVolumeSpecName "kube-api-access-f2xff". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.899977 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.988789 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b184729-91fb-4ebc-8ef1-b81a2aebc754-operator-scripts\") pod \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\" (UID: \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.988896 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpn7x\" (UniqueName: \"kubernetes.io/projected/2b184729-91fb-4ebc-8ef1-b81a2aebc754-kube-api-access-rpn7x\") pod \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\" (UID: \"2b184729-91fb-4ebc-8ef1-b81a2aebc754\") " Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.989307 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b184729-91fb-4ebc-8ef1-b81a2aebc754-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2b184729-91fb-4ebc-8ef1-b81a2aebc754" (UID: "2b184729-91fb-4ebc-8ef1-b81a2aebc754"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.989321 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2xff\" (UniqueName: \"kubernetes.io/projected/628af1b6-f4bb-4235-a14e-6a72d3f40830-kube-api-access-f2xff\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.989376 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pggw8\" (UniqueName: \"kubernetes.io/projected/a0b11f58-4665-4f50-83ec-48cfa18b3499-kube-api-access-pggw8\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.989388 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmmn8\" (UniqueName: \"kubernetes.io/projected/3f983fbe-e05f-4c6a-8759-0d6578726175-kube-api-access-tmmn8\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.989397 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0b11f58-4665-4f50-83ec-48cfa18b3499-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.989406 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f983fbe-e05f-4c6a-8759-0d6578726175-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:24 crc kubenswrapper[4835]: I0202 17:08:24.993150 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b184729-91fb-4ebc-8ef1-b81a2aebc754-kube-api-access-rpn7x" (OuterVolumeSpecName: "kube-api-access-rpn7x") pod "2b184729-91fb-4ebc-8ef1-b81a2aebc754" (UID: "2b184729-91fb-4ebc-8ef1-b81a2aebc754"). InnerVolumeSpecName "kube-api-access-rpn7x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.091188 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpn7x\" (UniqueName: \"kubernetes.io/projected/2b184729-91fb-4ebc-8ef1-b81a2aebc754-kube-api-access-rpn7x\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.091231 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2b184729-91fb-4ebc-8ef1-b81a2aebc754-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.157401 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vmwqm" event={"ID":"3f983fbe-e05f-4c6a-8759-0d6578726175","Type":"ContainerDied","Data":"d65cbe51cf542ade8786f1acc5944041e2755fd2731d9cc3c58ccad2089c453e"} Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.157481 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d65cbe51cf542ade8786f1acc5944041e2755fd2731d9cc3c58ccad2089c453e" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.157421 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vmwqm" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.160220 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3d3b-account-create-update-62lns" event={"ID":"628af1b6-f4bb-4235-a14e-6a72d3f40830","Type":"ContainerDied","Data":"71e62087d002b9bdc7ebcddfb32eec3d41cf29d9a691ab0a1dd96c83ae21b4d0"} Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.160264 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71e62087d002b9bdc7ebcddfb32eec3d41cf29d9a691ab0a1dd96c83ae21b4d0" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.160230 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3d3b-account-create-update-62lns" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.176130 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5705-account-create-update-mwnmq" event={"ID":"2b184729-91fb-4ebc-8ef1-b81a2aebc754","Type":"ContainerDied","Data":"53b65fcae2addec28f84483c7b58ce5b5bac74ef8978205087b3543b4649d3a7"} Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.176164 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53b65fcae2addec28f84483c7b58ce5b5bac74ef8978205087b3543b4649d3a7" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.176164 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5705-account-create-update-mwnmq" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.177617 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h9ppw" event={"ID":"a0b11f58-4665-4f50-83ec-48cfa18b3499","Type":"ContainerDied","Data":"521da892306b0fda40ce0af75a15f8bdfccc7940f0aee9f54b3539cee83317cc"} Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.177636 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="521da892306b0fda40ce0af75a15f8bdfccc7940f0aee9f54b3539cee83317cc" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.177691 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-h9ppw" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.179017 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-rgx9x" Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.179050 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-rgx9x" event={"ID":"fa7c0b48-4d54-47f6-b862-42c3caeedb80","Type":"ContainerDied","Data":"bd7a1a0f601054aaf144aa080e712be269be7bd77b66a20c71bf222b4a801bd1"} Feb 02 17:08:25 crc kubenswrapper[4835]: I0202 17:08:25.179077 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd7a1a0f601054aaf144aa080e712be269be7bd77b66a20c71bf222b4a801bd1" Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.209856 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-03dc-account-create-update-sn2k6" event={"ID":"879465eb-d451-428b-8f21-d4f47afe9ada","Type":"ContainerDied","Data":"666ca3d4ae8b162168df3b68187dbdb37f70a6bbe4b2488e7a18ab3fdca0623c"} Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.210600 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="666ca3d4ae8b162168df3b68187dbdb37f70a6bbe4b2488e7a18ab3fdca0623c" Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.351742 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.551702 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/879465eb-d451-428b-8f21-d4f47afe9ada-operator-scripts\") pod \"879465eb-d451-428b-8f21-d4f47afe9ada\" (UID: \"879465eb-d451-428b-8f21-d4f47afe9ada\") " Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.552165 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bm7q\" (UniqueName: \"kubernetes.io/projected/879465eb-d451-428b-8f21-d4f47afe9ada-kube-api-access-8bm7q\") pod \"879465eb-d451-428b-8f21-d4f47afe9ada\" (UID: \"879465eb-d451-428b-8f21-d4f47afe9ada\") " Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.552540 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/879465eb-d451-428b-8f21-d4f47afe9ada-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "879465eb-d451-428b-8f21-d4f47afe9ada" (UID: "879465eb-d451-428b-8f21-d4f47afe9ada"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.601193 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/879465eb-d451-428b-8f21-d4f47afe9ada-kube-api-access-8bm7q" (OuterVolumeSpecName: "kube-api-access-8bm7q") pod "879465eb-d451-428b-8f21-d4f47afe9ada" (UID: "879465eb-d451-428b-8f21-d4f47afe9ada"). InnerVolumeSpecName "kube-api-access-8bm7q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.654136 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/879465eb-d451-428b-8f21-d4f47afe9ada-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:28 crc kubenswrapper[4835]: I0202 17:08:28.654174 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bm7q\" (UniqueName: \"kubernetes.io/projected/879465eb-d451-428b-8f21-d4f47afe9ada-kube-api-access-8bm7q\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:29 crc kubenswrapper[4835]: I0202 17:08:29.219515 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-03dc-account-create-update-sn2k6" Feb 02 17:08:29 crc kubenswrapper[4835]: I0202 17:08:29.219486 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-krtrd" event={"ID":"360af2ab-a220-45ad-94cf-87415175d269","Type":"ContainerStarted","Data":"3c78a6ca02fca1ebd6ed30ff1e6a7bc98054b1686135fee3ecc37f6eab2a2186"} Feb 02 17:08:29 crc kubenswrapper[4835]: I0202 17:08:29.235664 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-krtrd" podStartSLOduration=2.04956629 podStartE2EDuration="8.235644683s" podCreationTimestamp="2026-02-02 17:08:21 +0000 UTC" firstStartedPulling="2026-02-02 17:08:22.055169663 +0000 UTC m=+1093.676773743" lastFinishedPulling="2026-02-02 17:08:28.241248046 +0000 UTC m=+1099.862852136" observedRunningTime="2026-02-02 17:08:29.23448319 +0000 UTC m=+1100.856087280" watchObservedRunningTime="2026-02-02 17:08:29.235644683 +0000 UTC m=+1100.857248763" Feb 02 17:08:31 crc kubenswrapper[4835]: I0202 17:08:31.233868 4835 generic.go:334] "Generic (PLEG): container finished" podID="360af2ab-a220-45ad-94cf-87415175d269" containerID="3c78a6ca02fca1ebd6ed30ff1e6a7bc98054b1686135fee3ecc37f6eab2a2186" exitCode=0 Feb 02 17:08:31 crc kubenswrapper[4835]: I0202 17:08:31.233961 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-krtrd" event={"ID":"360af2ab-a220-45ad-94cf-87415175d269","Type":"ContainerDied","Data":"3c78a6ca02fca1ebd6ed30ff1e6a7bc98054b1686135fee3ecc37f6eab2a2186"} Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.551751 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.719946 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-combined-ca-bundle\") pod \"360af2ab-a220-45ad-94cf-87415175d269\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.720074 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-config-data\") pod \"360af2ab-a220-45ad-94cf-87415175d269\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.720187 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkfnz\" (UniqueName: \"kubernetes.io/projected/360af2ab-a220-45ad-94cf-87415175d269-kube-api-access-dkfnz\") pod \"360af2ab-a220-45ad-94cf-87415175d269\" (UID: \"360af2ab-a220-45ad-94cf-87415175d269\") " Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.727628 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/360af2ab-a220-45ad-94cf-87415175d269-kube-api-access-dkfnz" (OuterVolumeSpecName: "kube-api-access-dkfnz") pod "360af2ab-a220-45ad-94cf-87415175d269" (UID: "360af2ab-a220-45ad-94cf-87415175d269"). InnerVolumeSpecName "kube-api-access-dkfnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.751647 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "360af2ab-a220-45ad-94cf-87415175d269" (UID: "360af2ab-a220-45ad-94cf-87415175d269"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.767574 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-config-data" (OuterVolumeSpecName: "config-data") pod "360af2ab-a220-45ad-94cf-87415175d269" (UID: "360af2ab-a220-45ad-94cf-87415175d269"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.822036 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkfnz\" (UniqueName: \"kubernetes.io/projected/360af2ab-a220-45ad-94cf-87415175d269-kube-api-access-dkfnz\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.822074 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:32 crc kubenswrapper[4835]: I0202 17:08:32.822083 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/360af2ab-a220-45ad-94cf-87415175d269-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.251404 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-krtrd" event={"ID":"360af2ab-a220-45ad-94cf-87415175d269","Type":"ContainerDied","Data":"c9249e012567bbe8664c9179b699a459d6c562005f8fe9e18aae0e0e94823a54"} Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.251450 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9249e012567bbe8664c9179b699a459d6c562005f8fe9e18aae0e0e94823a54" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.251526 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-krtrd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.522916 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67795cd9-jjlmd"] Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523498 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" containerName="init" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523523 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" containerName="init" Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523562 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="628af1b6-f4bb-4235-a14e-6a72d3f40830" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523574 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="628af1b6-f4bb-4235-a14e-6a72d3f40830" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523591 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b184729-91fb-4ebc-8ef1-b81a2aebc754" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523604 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b184729-91fb-4ebc-8ef1-b81a2aebc754" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523625 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0b11f58-4665-4f50-83ec-48cfa18b3499" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523636 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0b11f58-4665-4f50-83ec-48cfa18b3499" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523654 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" 
containerName="dnsmasq-dns" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523666 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" containerName="dnsmasq-dns" Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523684 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f983fbe-e05f-4c6a-8759-0d6578726175" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523696 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f983fbe-e05f-4c6a-8759-0d6578726175" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523719 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="360af2ab-a220-45ad-94cf-87415175d269" containerName="keystone-db-sync" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523731 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="360af2ab-a220-45ad-94cf-87415175d269" containerName="keystone-db-sync" Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523750 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa7c0b48-4d54-47f6-b862-42c3caeedb80" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523760 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa7c0b48-4d54-47f6-b862-42c3caeedb80" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: E0202 17:08:33.523777 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="879465eb-d451-428b-8f21-d4f47afe9ada" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.523789 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="879465eb-d451-428b-8f21-d4f47afe9ada" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.524062 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f983fbe-e05f-4c6a-8759-0d6578726175" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.524094 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b184729-91fb-4ebc-8ef1-b81a2aebc754" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.524110 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa7c0b48-4d54-47f6-b862-42c3caeedb80" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.524133 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="628af1b6-f4bb-4235-a14e-6a72d3f40830" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.524155 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f09b478-2bc6-4e0c-958f-7cab0354d7d4" containerName="dnsmasq-dns" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.524184 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0b11f58-4665-4f50-83ec-48cfa18b3499" containerName="mariadb-database-create" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.524202 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="879465eb-d451-428b-8f21-d4f47afe9ada" containerName="mariadb-account-create-update" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.524214 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="360af2ab-a220-45ad-94cf-87415175d269" containerName="keystone-db-sync" 
Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.525576 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.534380 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-jjlmd"] Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.565514 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-drpzs"] Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.566583 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.570510 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cnmxn" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.570677 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.570792 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.570906 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.571380 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.603636 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-drpzs"] Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.635216 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-config\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.635282 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.635302 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-dns-svc\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.635410 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcxsb\" (UniqueName: \"kubernetes.io/projected/53dc7208-924f-43a2-9828-779f33c0ed18-kube-api-access-rcxsb\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.635436 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-nb\") pod 
\"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737219 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-dns-svc\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737335 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-combined-ca-bundle\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737395 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm9tf\" (UniqueName: \"kubernetes.io/projected/e9078f85-91e5-4084-8f5d-36ec6878cb05-kube-api-access-jm9tf\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737435 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-config-data\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737465 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcxsb\" (UniqueName: \"kubernetes.io/projected/53dc7208-924f-43a2-9828-779f33c0ed18-kube-api-access-rcxsb\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737502 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-scripts\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737526 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737552 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-credential-keys\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737591 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-fernet-keys\") pod 
\"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737630 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-config\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.737874 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.738482 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-dns-svc\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.738525 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.738619 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-config\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.738753 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.758587 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-5nshn"] Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.760217 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.764205 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.764246 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.764305 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-frjx6" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.774636 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcxsb\" (UniqueName: \"kubernetes.io/projected/53dc7208-924f-43a2-9828-779f33c0ed18-kube-api-access-rcxsb\") pod \"dnsmasq-dns-67795cd9-jjlmd\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.788017 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-5nshn"] Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.840134 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-config-data\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.840205 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-scripts\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.840236 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-credential-keys\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.840297 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-fernet-keys\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.840402 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-combined-ca-bundle\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.840449 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm9tf\" (UniqueName: \"kubernetes.io/projected/e9078f85-91e5-4084-8f5d-36ec6878cb05-kube-api-access-jm9tf\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.854249 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-combined-ca-bundle\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.858774 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-fernet-keys\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.860941 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-config-data\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.862731 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.869475 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-scripts\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.884093 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-credential-keys\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.918214 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.927019 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm9tf\" (UniqueName: \"kubernetes.io/projected/e9078f85-91e5-4084-8f5d-36ec6878cb05-kube-api-access-jm9tf\") pod \"keystone-bootstrap-drpzs\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.935233 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.944832 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-combined-ca-bundle\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.944892 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkzk5\" (UniqueName: \"kubernetes.io/projected/5b04049d-f2c7-4368-969b-4b5d1d4628b8-kube-api-access-rkzk5\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.944942 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-config\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.958231 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.958397 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.996313 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-65fpg"] Feb 02 17:08:33 crc kubenswrapper[4835]: I0202 17:08:33.997375 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.007416 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.008849 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.010945 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-rglht" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.015514 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.046795 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-config-data\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047088 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047121 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-scripts\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047158 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-combined-ca-bundle\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047196 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-run-httpd\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047230 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkzk5\" (UniqueName: \"kubernetes.io/projected/5b04049d-f2c7-4368-969b-4b5d1d4628b8-kube-api-access-rkzk5\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047251 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-log-httpd\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047320 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047346 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-config\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.047380 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/660aa765-d3f6-4673-a4df-7e4b46ab60ac-kube-api-access-prpvz\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.058909 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-config\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.063612 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-combined-ca-bundle\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.100323 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-65fpg"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.123178 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkzk5\" (UniqueName: \"kubernetes.io/projected/5b04049d-f2c7-4368-969b-4b5d1d4628b8-kube-api-access-rkzk5\") pod \"neutron-db-sync-5nshn\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151111 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151168 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/660aa765-d3f6-4673-a4df-7e4b46ab60ac-kube-api-access-prpvz\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151190 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-db-sync-config-data\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151228 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-config-data\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151259 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-scripts\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151377 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r2mg\" (UniqueName: \"kubernetes.io/projected/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-kube-api-access-8r2mg\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151413 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151434 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-scripts\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151460 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-combined-ca-bundle\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151484 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-run-httpd\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151511 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-config-data\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151526 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-etc-machine-id\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.151543 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-log-httpd\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc 
kubenswrapper[4835]: I0202 17:08:34.151957 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-log-httpd\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.184900 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-run-httpd\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.190477 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.191755 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.195240 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-scripts\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.199632 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.220846 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-config-data\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.255566 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-db-sync-config-data\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.255668 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-scripts\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.255704 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r2mg\" (UniqueName: \"kubernetes.io/projected/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-kube-api-access-8r2mg\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.255753 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-combined-ca-bundle\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.255789 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-config-data\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.255810 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-etc-machine-id\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.255992 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-etc-machine-id\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.278749 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-scripts\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.337622 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-jjlmd"] Feb 02 17:08:34 crc 
kubenswrapper[4835]: I0202 17:08:34.338002 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-db-sync-config-data\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.338555 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/660aa765-d3f6-4673-a4df-7e4b46ab60ac-kube-api-access-prpvz\") pod \"ceilometer-0\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.341726 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-config-data\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.342140 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-combined-ca-bundle\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.343439 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.363025 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r2mg\" (UniqueName: \"kubernetes.io/projected/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-kube-api-access-8r2mg\") pod \"cinder-db-sync-65fpg\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.363655 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-bgpns"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.378035 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.384604 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.384769 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-qn5jg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.400266 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-bgpns"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.414413 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-nhtch"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.416025 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.418814 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.419254 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ng24p" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.420636 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.421088 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-65fpg" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.421490 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-82dzj"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.422623 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.422860 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.434302 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-nhtch"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.435297 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-82dzj"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462050 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462092 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08cf9281-9a97-420d-b734-735a7975dfe9-logs\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462122 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm9h5\" (UniqueName: \"kubernetes.io/projected/eb0dd549-94f1-45b7-85c6-96039b500f39-kube-api-access-mm9h5\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462139 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7lpx\" (UniqueName: \"kubernetes.io/projected/08cf9281-9a97-420d-b734-735a7975dfe9-kube-api-access-t7lpx\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462154 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-scripts\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " 
pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462188 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-db-sync-config-data\") pod \"barbican-db-sync-bgpns\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462217 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-combined-ca-bundle\") pod \"barbican-db-sync-bgpns\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462234 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-config\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462260 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9gxp\" (UniqueName: \"kubernetes.io/projected/6bbf2f76-2a57-4df0-989c-3a55710ef86c-kube-api-access-t9gxp\") pod \"barbican-db-sync-bgpns\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462295 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462317 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462337 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-config-data\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.462352 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-combined-ca-bundle\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.568161 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-db-sync-config-data\") pod \"barbican-db-sync-bgpns\" (UID: 
\"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.568229 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-combined-ca-bundle\") pod \"barbican-db-sync-bgpns\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.568252 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-config\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572337 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9gxp\" (UniqueName: \"kubernetes.io/projected/6bbf2f76-2a57-4df0-989c-3a55710ef86c-kube-api-access-t9gxp\") pod \"barbican-db-sync-bgpns\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572409 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572463 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572511 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-config-data\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572534 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-combined-ca-bundle\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572643 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572686 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08cf9281-9a97-420d-b734-735a7975dfe9-logs\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 
17:08:34.572741 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm9h5\" (UniqueName: \"kubernetes.io/projected/eb0dd549-94f1-45b7-85c6-96039b500f39-kube-api-access-mm9h5\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572765 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7lpx\" (UniqueName: \"kubernetes.io/projected/08cf9281-9a97-420d-b734-735a7975dfe9-kube-api-access-t7lpx\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.572787 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-scripts\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.574231 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-combined-ca-bundle\") pod \"barbican-db-sync-bgpns\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.574233 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-db-sync-config-data\") pod \"barbican-db-sync-bgpns\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.575559 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-config\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.578445 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.578811 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08cf9281-9a97-420d-b734-735a7975dfe9-logs\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.579110 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-scripts\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.580293 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.581598 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.582056 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-config-data\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.597534 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-combined-ca-bundle\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.601806 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm9h5\" (UniqueName: \"kubernetes.io/projected/eb0dd549-94f1-45b7-85c6-96039b500f39-kube-api-access-mm9h5\") pod \"dnsmasq-dns-5b6dbdb6f5-82dzj\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.601865 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9gxp\" (UniqueName: \"kubernetes.io/projected/6bbf2f76-2a57-4df0-989c-3a55710ef86c-kube-api-access-t9gxp\") pod \"barbican-db-sync-bgpns\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.604001 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7lpx\" (UniqueName: \"kubernetes.io/projected/08cf9281-9a97-420d-b734-735a7975dfe9-kube-api-access-t7lpx\") pod \"placement-db-sync-nhtch\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.711244 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-jjlmd"] Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.783035 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-bgpns" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.803757 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-nhtch" Feb 02 17:08:34 crc kubenswrapper[4835]: I0202 17:08:34.835214 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.016838 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-drpzs"] Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.120298 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.239434 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-5nshn"] Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.285206 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-65fpg"] Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.426174 4835 generic.go:334] "Generic (PLEG): container finished" podID="53dc7208-924f-43a2-9828-779f33c0ed18" containerID="02f71d50a62c955c0f9015645f44e5b51f999dfae40ba7dc2a016351d6619667" exitCode=0 Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.426233 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-jjlmd" event={"ID":"53dc7208-924f-43a2-9828-779f33c0ed18","Type":"ContainerDied","Data":"02f71d50a62c955c0f9015645f44e5b51f999dfae40ba7dc2a016351d6619667"} Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.426261 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-jjlmd" event={"ID":"53dc7208-924f-43a2-9828-779f33c0ed18","Type":"ContainerStarted","Data":"1273ad488545a422a7976f884b9997d6be41fd6dc7add0b21f8dc23e287fc3bf"} Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.441088 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5nshn" event={"ID":"5b04049d-f2c7-4368-969b-4b5d1d4628b8","Type":"ContainerStarted","Data":"c821a1bb3c389d62fa0d871e042fa4a092e9d3f0ef5f04dcda88c9638f653190"} Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.442252 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerStarted","Data":"e858ff3776cd796c5a895df5624518e5fa044eb9b5323019cf43d09979385130"} Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.446155 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-drpzs" event={"ID":"e9078f85-91e5-4084-8f5d-36ec6878cb05","Type":"ContainerStarted","Data":"9ca0bd41f63305ca8074fd0c85415cda0dc61b359e821f6d5102af0842ff66cf"} Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.454707 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-65fpg" event={"ID":"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9","Type":"ContainerStarted","Data":"5aae7824c5cdae00328f7ad62eb45731d64cbb27120fe5be5601865c621d8c74"} Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.535440 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-nhtch"] Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.581799 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-bgpns"] Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.752031 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-82dzj"] Feb 02 17:08:35 crc kubenswrapper[4835]: W0202 17:08:35.755011 4835 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb0dd549_94f1_45b7_85c6_96039b500f39.slice/crio-efb693de69095640b9bf8036589f06a52442f01da80e53494728777996530613 WatchSource:0}: Error finding container efb693de69095640b9bf8036589f06a52442f01da80e53494728777996530613: Status 404 returned error can't find the container with id efb693de69095640b9bf8036589f06a52442f01da80e53494728777996530613 Feb 02 17:08:35 crc kubenswrapper[4835]: I0202 17:08:35.952476 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.031958 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-sb\") pod \"53dc7208-924f-43a2-9828-779f33c0ed18\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.032255 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcxsb\" (UniqueName: \"kubernetes.io/projected/53dc7208-924f-43a2-9828-779f33c0ed18-kube-api-access-rcxsb\") pod \"53dc7208-924f-43a2-9828-779f33c0ed18\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.032288 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-nb\") pod \"53dc7208-924f-43a2-9828-779f33c0ed18\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.032395 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-config\") pod \"53dc7208-924f-43a2-9828-779f33c0ed18\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.032421 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-dns-svc\") pod \"53dc7208-924f-43a2-9828-779f33c0ed18\" (UID: \"53dc7208-924f-43a2-9828-779f33c0ed18\") " Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.058783 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "53dc7208-924f-43a2-9828-779f33c0ed18" (UID: "53dc7208-924f-43a2-9828-779f33c0ed18"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.059596 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-config" (OuterVolumeSpecName: "config") pod "53dc7208-924f-43a2-9828-779f33c0ed18" (UID: "53dc7208-924f-43a2-9828-779f33c0ed18"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.062968 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "53dc7208-924f-43a2-9828-779f33c0ed18" (UID: "53dc7208-924f-43a2-9828-779f33c0ed18"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.076972 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53dc7208-924f-43a2-9828-779f33c0ed18-kube-api-access-rcxsb" (OuterVolumeSpecName: "kube-api-access-rcxsb") pod "53dc7208-924f-43a2-9828-779f33c0ed18" (UID: "53dc7208-924f-43a2-9828-779f33c0ed18"). InnerVolumeSpecName "kube-api-access-rcxsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.087250 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "53dc7208-924f-43a2-9828-779f33c0ed18" (UID: "53dc7208-924f-43a2-9828-779f33c0ed18"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.134775 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.134812 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcxsb\" (UniqueName: \"kubernetes.io/projected/53dc7208-924f-43a2-9828-779f33c0ed18-kube-api-access-rcxsb\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.134823 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.134833 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.134843 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53dc7208-924f-43a2-9828-779f33c0ed18-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.485939 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.507500 4835 generic.go:334] "Generic (PLEG): container finished" podID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerID="7f5aded104bdf926575d1260f4bd38781382455dcb2a364a79a67ed3a80800db" exitCode=0 Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.507556 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" event={"ID":"eb0dd549-94f1-45b7-85c6-96039b500f39","Type":"ContainerDied","Data":"7f5aded104bdf926575d1260f4bd38781382455dcb2a364a79a67ed3a80800db"} Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.507601 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" event={"ID":"eb0dd549-94f1-45b7-85c6-96039b500f39","Type":"ContainerStarted","Data":"efb693de69095640b9bf8036589f06a52442f01da80e53494728777996530613"} Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.510526 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-drpzs" 
event={"ID":"e9078f85-91e5-4084-8f5d-36ec6878cb05","Type":"ContainerStarted","Data":"d1e4ec6585a148572f18d9d45b46bd74d07faf1a6518056dacc55744cd1ac570"} Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.545398 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-nhtch" event={"ID":"08cf9281-9a97-420d-b734-735a7975dfe9","Type":"ContainerStarted","Data":"f595328f1d79f0e54fa9f5b66473367fa9fc808086093ab46de6bc6f1c57d15e"} Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.554613 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-jjlmd" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.555461 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-jjlmd" event={"ID":"53dc7208-924f-43a2-9828-779f33c0ed18","Type":"ContainerDied","Data":"1273ad488545a422a7976f884b9997d6be41fd6dc7add0b21f8dc23e287fc3bf"} Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.555524 4835 scope.go:117] "RemoveContainer" containerID="02f71d50a62c955c0f9015645f44e5b51f999dfae40ba7dc2a016351d6619667" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.577327 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5nshn" event={"ID":"5b04049d-f2c7-4368-969b-4b5d1d4628b8","Type":"ContainerStarted","Data":"7bffce90b5ea58660419802f3741f27e83986bad2e33b2133aad22d7bc39ec47"} Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.592856 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-bgpns" event={"ID":"6bbf2f76-2a57-4df0-989c-3a55710ef86c","Type":"ContainerStarted","Data":"1b37dbd1905646cb0e6f0a749dbc55874d37542c47d0e5314ac17e1ecb11c9c7"} Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.602301 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-5nshn" podStartSLOduration=3.602281182 podStartE2EDuration="3.602281182s" podCreationTimestamp="2026-02-02 17:08:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:08:36.601032777 +0000 UTC m=+1108.222636867" watchObservedRunningTime="2026-02-02 17:08:36.602281182 +0000 UTC m=+1108.223885262" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.604368 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-drpzs" podStartSLOduration=3.604359611 podStartE2EDuration="3.604359611s" podCreationTimestamp="2026-02-02 17:08:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:08:36.565171373 +0000 UTC m=+1108.186775453" watchObservedRunningTime="2026-02-02 17:08:36.604359611 +0000 UTC m=+1108.225963691" Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.734410 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-jjlmd"] Feb 02 17:08:36 crc kubenswrapper[4835]: I0202 17:08:36.764382 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-jjlmd"] Feb 02 17:08:37 crc kubenswrapper[4835]: I0202 17:08:37.219898 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53dc7208-924f-43a2-9828-779f33c0ed18" path="/var/lib/kubelet/pods/53dc7208-924f-43a2-9828-779f33c0ed18/volumes" Feb 02 17:08:37 crc kubenswrapper[4835]: I0202 17:08:37.648084 4835 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" event={"ID":"eb0dd549-94f1-45b7-85c6-96039b500f39","Type":"ContainerStarted","Data":"4bde80bce095b9e71fc44a95b8656cfa56704328e0611900c35b621b3dbcda02"} Feb 02 17:08:37 crc kubenswrapper[4835]: I0202 17:08:37.648320 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:37 crc kubenswrapper[4835]: I0202 17:08:37.670080 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" podStartSLOduration=3.670061842 podStartE2EDuration="3.670061842s" podCreationTimestamp="2026-02-02 17:08:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:08:37.669316631 +0000 UTC m=+1109.290920711" watchObservedRunningTime="2026-02-02 17:08:37.670061842 +0000 UTC m=+1109.291665922" Feb 02 17:08:41 crc kubenswrapper[4835]: I0202 17:08:41.697096 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-drpzs" event={"ID":"e9078f85-91e5-4084-8f5d-36ec6878cb05","Type":"ContainerDied","Data":"d1e4ec6585a148572f18d9d45b46bd74d07faf1a6518056dacc55744cd1ac570"} Feb 02 17:08:41 crc kubenswrapper[4835]: I0202 17:08:41.697055 4835 generic.go:334] "Generic (PLEG): container finished" podID="e9078f85-91e5-4084-8f5d-36ec6878cb05" containerID="d1e4ec6585a148572f18d9d45b46bd74d07faf1a6518056dacc55744cd1ac570" exitCode=0 Feb 02 17:08:44 crc kubenswrapper[4835]: I0202 17:08:44.837128 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:08:44 crc kubenswrapper[4835]: I0202 17:08:44.900822 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-v2pgc"] Feb 02 17:08:44 crc kubenswrapper[4835]: I0202 17:08:44.901077 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="dnsmasq-dns" containerID="cri-o://1c27ae4a784f19d6cdb906c903704d2078d41d83ef772405698725ded1fcfeda" gracePeriod=10 Feb 02 17:08:45 crc kubenswrapper[4835]: I0202 17:08:45.688318 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: connect: connection refused" Feb 02 17:08:45 crc kubenswrapper[4835]: I0202 17:08:45.731879 4835 generic.go:334] "Generic (PLEG): container finished" podID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerID="1c27ae4a784f19d6cdb906c903704d2078d41d83ef772405698725ded1fcfeda" exitCode=0 Feb 02 17:08:45 crc kubenswrapper[4835]: I0202 17:08:45.731927 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" event={"ID":"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a","Type":"ContainerDied","Data":"1c27ae4a784f19d6cdb906c903704d2078d41d83ef772405698725ded1fcfeda"} Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.477102 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.580519 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-config-data\") pod \"e9078f85-91e5-4084-8f5d-36ec6878cb05\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.580573 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-scripts\") pod \"e9078f85-91e5-4084-8f5d-36ec6878cb05\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.580675 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm9tf\" (UniqueName: \"kubernetes.io/projected/e9078f85-91e5-4084-8f5d-36ec6878cb05-kube-api-access-jm9tf\") pod \"e9078f85-91e5-4084-8f5d-36ec6878cb05\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.580715 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-combined-ca-bundle\") pod \"e9078f85-91e5-4084-8f5d-36ec6878cb05\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.580756 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-fernet-keys\") pod \"e9078f85-91e5-4084-8f5d-36ec6878cb05\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.580804 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-credential-keys\") pod \"e9078f85-91e5-4084-8f5d-36ec6878cb05\" (UID: \"e9078f85-91e5-4084-8f5d-36ec6878cb05\") " Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.586417 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e9078f85-91e5-4084-8f5d-36ec6878cb05" (UID: "e9078f85-91e5-4084-8f5d-36ec6878cb05"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.586468 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9078f85-91e5-4084-8f5d-36ec6878cb05-kube-api-access-jm9tf" (OuterVolumeSpecName: "kube-api-access-jm9tf") pod "e9078f85-91e5-4084-8f5d-36ec6878cb05" (UID: "e9078f85-91e5-4084-8f5d-36ec6878cb05"). InnerVolumeSpecName "kube-api-access-jm9tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.606000 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e9078f85-91e5-4084-8f5d-36ec6878cb05" (UID: "e9078f85-91e5-4084-8f5d-36ec6878cb05"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.624524 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-scripts" (OuterVolumeSpecName: "scripts") pod "e9078f85-91e5-4084-8f5d-36ec6878cb05" (UID: "e9078f85-91e5-4084-8f5d-36ec6878cb05"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.632130 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-config-data" (OuterVolumeSpecName: "config-data") pod "e9078f85-91e5-4084-8f5d-36ec6878cb05" (UID: "e9078f85-91e5-4084-8f5d-36ec6878cb05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.668320 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9078f85-91e5-4084-8f5d-36ec6878cb05" (UID: "e9078f85-91e5-4084-8f5d-36ec6878cb05"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.685011 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.685062 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.685077 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm9tf\" (UniqueName: \"kubernetes.io/projected/e9078f85-91e5-4084-8f5d-36ec6878cb05-kube-api-access-jm9tf\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.685087 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.685097 4835 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.685105 4835 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9078f85-91e5-4084-8f5d-36ec6878cb05-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.785084 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-drpzs" event={"ID":"e9078f85-91e5-4084-8f5d-36ec6878cb05","Type":"ContainerDied","Data":"9ca0bd41f63305ca8074fd0c85415cda0dc61b359e821f6d5102af0842ff66cf"} Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.785121 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ca0bd41f63305ca8074fd0c85415cda0dc61b359e821f6d5102af0842ff66cf" Feb 02 17:08:48 crc kubenswrapper[4835]: I0202 17:08:48.785168 4835 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-drpzs" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.568984 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-drpzs"] Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.575219 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-drpzs"] Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.669831 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7w9z6"] Feb 02 17:08:49 crc kubenswrapper[4835]: E0202 17:08:49.670183 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53dc7208-924f-43a2-9828-779f33c0ed18" containerName="init" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.670207 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="53dc7208-924f-43a2-9828-779f33c0ed18" containerName="init" Feb 02 17:08:49 crc kubenswrapper[4835]: E0202 17:08:49.670225 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9078f85-91e5-4084-8f5d-36ec6878cb05" containerName="keystone-bootstrap" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.670232 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9078f85-91e5-4084-8f5d-36ec6878cb05" containerName="keystone-bootstrap" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.670389 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="53dc7208-924f-43a2-9828-779f33c0ed18" containerName="init" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.670408 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9078f85-91e5-4084-8f5d-36ec6878cb05" containerName="keystone-bootstrap" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.670968 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.673500 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.673587 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cnmxn" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.673805 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.674020 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.674186 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.678311 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7w9z6"] Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.807534 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-config-data\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.807589 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-scripts\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.807641 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-combined-ca-bundle\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.807679 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxblx\" (UniqueName: \"kubernetes.io/projected/c4534fed-f27c-4656-b496-cec6f87d9915-kube-api-access-wxblx\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.807712 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-fernet-keys\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.807736 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-credential-keys\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.909472 4835 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-scripts\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.909522 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-combined-ca-bundle\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.909554 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxblx\" (UniqueName: \"kubernetes.io/projected/c4534fed-f27c-4656-b496-cec6f87d9915-kube-api-access-wxblx\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.909577 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-fernet-keys\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.909592 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-credential-keys\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.909685 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-config-data\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.914294 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-scripts\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.915484 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-config-data\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.915941 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-combined-ca-bundle\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.916638 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-fernet-keys\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") 
" pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.924894 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-credential-keys\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.925631 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxblx\" (UniqueName: \"kubernetes.io/projected/c4534fed-f27c-4656-b496-cec6f87d9915-kube-api-access-wxblx\") pod \"keystone-bootstrap-7w9z6\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:49 crc kubenswrapper[4835]: I0202 17:08:49.989778 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:08:51 crc kubenswrapper[4835]: I0202 17:08:51.247738 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9078f85-91e5-4084-8f5d-36ec6878cb05" path="/var/lib/kubelet/pods/e9078f85-91e5-4084-8f5d-36ec6878cb05/volumes" Feb 02 17:08:55 crc kubenswrapper[4835]: I0202 17:08:55.694284 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: i/o timeout" Feb 02 17:08:57 crc kubenswrapper[4835]: E0202 17:08:57.413195 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Feb 02 17:08:57 crc kubenswrapper[4835]: E0202 17:08:57.413583 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8r2mg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-65fpg_openstack(7a6ab880-bf41-45c8-a66c-d096cf3d6eb9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 17:08:57 crc kubenswrapper[4835]: E0202 17:08:57.414977 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-65fpg" podUID="7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.662079 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.764309 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-sb\") pod \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.764401 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c6rz\" (UniqueName: \"kubernetes.io/projected/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-kube-api-access-9c6rz\") pod \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.764426 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-config\") pod \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.764458 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-nb\") pod \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.764486 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-dns-svc\") pod \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\" (UID: \"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a\") " Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.806154 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-kube-api-access-9c6rz" (OuterVolumeSpecName: "kube-api-access-9c6rz") pod "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" (UID: "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a"). InnerVolumeSpecName "kube-api-access-9c6rz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.810204 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" (UID: "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.815734 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-config" (OuterVolumeSpecName: "config") pod "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" (UID: "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.825624 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" (UID: "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.826817 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" (UID: "3b773f9f-674f-4e4a-98bb-dda10bd0ff9a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.866114 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.866144 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c6rz\" (UniqueName: \"kubernetes.io/projected/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-kube-api-access-9c6rz\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.866155 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.866163 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.866172 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.869643 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7w9z6"] Feb 02 17:08:57 crc kubenswrapper[4835]: W0202 17:08:57.874110 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4534fed_f27c_4656_b496_cec6f87d9915.slice/crio-72ce18f597f5062813f011bb51a1dddd1241e80d86667bd26b09178de4c41524 WatchSource:0}: Error finding container 72ce18f597f5062813f011bb51a1dddd1241e80d86667bd26b09178de4c41524: Status 404 returned error can't find the container with id 72ce18f597f5062813f011bb51a1dddd1241e80d86667bd26b09178de4c41524 Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.881473 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerStarted","Data":"7176eb1ad20a29f9434aadf7c9c8e4b49b3faffee72a94c9f3cc4d7bba09a7eb"} Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.883118 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-nhtch" event={"ID":"08cf9281-9a97-420d-b734-735a7975dfe9","Type":"ContainerStarted","Data":"dc3360bcb7da5b66db4f6628de0e7ae9395bde1426cf06961a947c59b9908940"} Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.886096 4835 generic.go:334] "Generic (PLEG): container finished" podID="5b04049d-f2c7-4368-969b-4b5d1d4628b8" containerID="7bffce90b5ea58660419802f3741f27e83986bad2e33b2133aad22d7bc39ec47" exitCode=0 Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.886165 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5nshn" 
event={"ID":"5b04049d-f2c7-4368-969b-4b5d1d4628b8","Type":"ContainerDied","Data":"7bffce90b5ea58660419802f3741f27e83986bad2e33b2133aad22d7bc39ec47"} Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.887634 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-bgpns" event={"ID":"6bbf2f76-2a57-4df0-989c-3a55710ef86c","Type":"ContainerStarted","Data":"5685c3891e165f448507a9f1a25d9802b4ae076323754161deb494c7573ff008"} Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.889955 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.893483 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" event={"ID":"3b773f9f-674f-4e4a-98bb-dda10bd0ff9a","Type":"ContainerDied","Data":"bc80aadcff0ec6141a51f4471828fc1c10335c7fc94eb1876f506170fde048f8"} Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.893553 4835 scope.go:117] "RemoveContainer" containerID="1c27ae4a784f19d6cdb906c903704d2078d41d83ef772405698725ded1fcfeda" Feb 02 17:08:57 crc kubenswrapper[4835]: E0202 17:08:57.894491 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-65fpg" podUID="7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.924105 4835 scope.go:117] "RemoveContainer" containerID="6c16aed4a64a992ab33d2c590b447adb0d3f54d0bcb058b817cc426b475263f8" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.937357 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-nhtch" podStartSLOduration=2.080259852 podStartE2EDuration="23.93733995s" podCreationTimestamp="2026-02-02 17:08:34 +0000 UTC" firstStartedPulling="2026-02-02 17:08:35.548553769 +0000 UTC m=+1107.170157849" lastFinishedPulling="2026-02-02 17:08:57.405633867 +0000 UTC m=+1129.027237947" observedRunningTime="2026-02-02 17:08:57.902806004 +0000 UTC m=+1129.524410104" watchObservedRunningTime="2026-02-02 17:08:57.93733995 +0000 UTC m=+1129.558944030" Feb 02 17:08:57 crc kubenswrapper[4835]: I0202 17:08:57.983334 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-bgpns" podStartSLOduration=2.188643995 podStartE2EDuration="23.983310919s" podCreationTimestamp="2026-02-02 17:08:34 +0000 UTC" firstStartedPulling="2026-02-02 17:08:35.605683293 +0000 UTC m=+1107.227287373" lastFinishedPulling="2026-02-02 17:08:57.400350207 +0000 UTC m=+1129.021954297" observedRunningTime="2026-02-02 17:08:57.973663696 +0000 UTC m=+1129.595267776" watchObservedRunningTime="2026-02-02 17:08:57.983310919 +0000 UTC m=+1129.604914999" Feb 02 17:08:58 crc kubenswrapper[4835]: I0202 17:08:58.008068 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-v2pgc"] Feb 02 17:08:58 crc kubenswrapper[4835]: I0202 17:08:58.019990 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-v2pgc"] Feb 02 17:08:58 crc kubenswrapper[4835]: I0202 17:08:58.918953 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7w9z6" 
event={"ID":"c4534fed-f27c-4656-b496-cec6f87d9915","Type":"ContainerStarted","Data":"31f44bb6e34fca83d5d21668588deee6a142c2ef364147899a00db58828e5e82"} Feb 02 17:08:58 crc kubenswrapper[4835]: I0202 17:08:58.919373 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7w9z6" event={"ID":"c4534fed-f27c-4656-b496-cec6f87d9915","Type":"ContainerStarted","Data":"72ce18f597f5062813f011bb51a1dddd1241e80d86667bd26b09178de4c41524"} Feb 02 17:08:58 crc kubenswrapper[4835]: I0202 17:08:58.939702 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7w9z6" podStartSLOduration=9.93966979 podStartE2EDuration="9.93966979s" podCreationTimestamp="2026-02-02 17:08:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:08:58.935942485 +0000 UTC m=+1130.557546565" watchObservedRunningTime="2026-02-02 17:08:58.93966979 +0000 UTC m=+1130.561273860" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.203400 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" path="/var/lib/kubelet/pods/3b773f9f-674f-4e4a-98bb-dda10bd0ff9a/volumes" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.369469 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.500443 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-combined-ca-bundle\") pod \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.500558 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-config\") pod \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.500651 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkzk5\" (UniqueName: \"kubernetes.io/projected/5b04049d-f2c7-4368-969b-4b5d1d4628b8-kube-api-access-rkzk5\") pod \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\" (UID: \"5b04049d-f2c7-4368-969b-4b5d1d4628b8\") " Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.516609 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b04049d-f2c7-4368-969b-4b5d1d4628b8-kube-api-access-rkzk5" (OuterVolumeSpecName: "kube-api-access-rkzk5") pod "5b04049d-f2c7-4368-969b-4b5d1d4628b8" (UID: "5b04049d-f2c7-4368-969b-4b5d1d4628b8"). InnerVolumeSpecName "kube-api-access-rkzk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.525695 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b04049d-f2c7-4368-969b-4b5d1d4628b8" (UID: "5b04049d-f2c7-4368-969b-4b5d1d4628b8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.529166 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-config" (OuterVolumeSpecName: "config") pod "5b04049d-f2c7-4368-969b-4b5d1d4628b8" (UID: "5b04049d-f2c7-4368-969b-4b5d1d4628b8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.603529 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.603588 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b04049d-f2c7-4368-969b-4b5d1d4628b8-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.603600 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkzk5\" (UniqueName: \"kubernetes.io/projected/5b04049d-f2c7-4368-969b-4b5d1d4628b8-kube-api-access-rkzk5\") on node \"crc\" DevicePath \"\"" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.929371 4835 generic.go:334] "Generic (PLEG): container finished" podID="08cf9281-9a97-420d-b734-735a7975dfe9" containerID="dc3360bcb7da5b66db4f6628de0e7ae9395bde1426cf06961a947c59b9908940" exitCode=0 Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.929453 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-nhtch" event={"ID":"08cf9281-9a97-420d-b734-735a7975dfe9","Type":"ContainerDied","Data":"dc3360bcb7da5b66db4f6628de0e7ae9395bde1426cf06961a947c59b9908940"} Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.930689 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5nshn" event={"ID":"5b04049d-f2c7-4368-969b-4b5d1d4628b8","Type":"ContainerDied","Data":"c821a1bb3c389d62fa0d871e042fa4a092e9d3f0ef5f04dcda88c9638f653190"} Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.930723 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c821a1bb3c389d62fa0d871e042fa4a092e9d3f0ef5f04dcda88c9638f653190" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.930783 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-5nshn" Feb 02 17:08:59 crc kubenswrapper[4835]: I0202 17:08:59.940139 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerStarted","Data":"ca4f044e8a4403a11c7d3bf26b6da66c19176b2e0e401cdfb813d2c967882b00"} Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.273037 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6tgfz"] Feb 02 17:09:00 crc kubenswrapper[4835]: E0202 17:09:00.273453 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="dnsmasq-dns" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.273475 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="dnsmasq-dns" Feb 02 17:09:00 crc kubenswrapper[4835]: E0202 17:09:00.273495 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="init" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.273503 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="init" Feb 02 17:09:00 crc kubenswrapper[4835]: E0202 17:09:00.273534 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b04049d-f2c7-4368-969b-4b5d1d4628b8" containerName="neutron-db-sync" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.273542 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b04049d-f2c7-4368-969b-4b5d1d4628b8" containerName="neutron-db-sync" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.273757 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b04049d-f2c7-4368-969b-4b5d1d4628b8" containerName="neutron-db-sync" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.273781 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="dnsmasq-dns" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.316433 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6tgfz"] Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.316533 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.371087 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5958d6c764-v628z"] Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.430426 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5958d6c764-v628z"] Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.430548 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.432413 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.432449 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.432487 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.432587 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-config\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.432614 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqzk7\" (UniqueName: \"kubernetes.io/projected/af37c205-e3c2-43d3-a0df-9fb3e8629f87-kube-api-access-wqzk7\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.436710 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-frjx6" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.436948 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.437177 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.443740 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535341 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-config\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535390 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqzk7\" (UniqueName: \"kubernetes.io/projected/af37c205-e3c2-43d3-a0df-9fb3e8629f87-kube-api-access-wqzk7\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc 
kubenswrapper[4835]: I0202 17:09:00.535447 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535478 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535524 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535550 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-httpd-config\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535583 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-ovndb-tls-certs\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535607 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p27g\" (UniqueName: \"kubernetes.io/projected/30738051-1c87-4817-9ebf-7cdf056c4a2f-kube-api-access-5p27g\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535630 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-config\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.535707 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-combined-ca-bundle\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.536722 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 
17:09:00.536822 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.537235 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.537352 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-config\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.562191 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqzk7\" (UniqueName: \"kubernetes.io/projected/af37c205-e3c2-43d3-a0df-9fb3e8629f87-kube-api-access-wqzk7\") pod \"dnsmasq-dns-5f66db59b9-6tgfz\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.637150 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-httpd-config\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.637191 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-ovndb-tls-certs\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.637215 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p27g\" (UniqueName: \"kubernetes.io/projected/30738051-1c87-4817-9ebf-7cdf056c4a2f-kube-api-access-5p27g\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.637237 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-config\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.637342 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-combined-ca-bundle\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.640134 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-httpd-config\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.640731 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-combined-ca-bundle\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.643709 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-config\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.647629 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.653927 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-ovndb-tls-certs\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.661922 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p27g\" (UniqueName: \"kubernetes.io/projected/30738051-1c87-4817-9ebf-7cdf056c4a2f-kube-api-access-5p27g\") pod \"neutron-5958d6c764-v628z\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.695855 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-554567b4f7-v2pgc" podUID="3b773f9f-674f-4e4a-98bb-dda10bd0ff9a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: i/o timeout" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.760925 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.964044 4835 generic.go:334] "Generic (PLEG): container finished" podID="6bbf2f76-2a57-4df0-989c-3a55710ef86c" containerID="5685c3891e165f448507a9f1a25d9802b4ae076323754161deb494c7573ff008" exitCode=0 Feb 02 17:09:00 crc kubenswrapper[4835]: I0202 17:09:00.964509 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-bgpns" event={"ID":"6bbf2f76-2a57-4df0-989c-3a55710ef86c","Type":"ContainerDied","Data":"5685c3891e165f448507a9f1a25d9802b4ae076323754161deb494c7573ff008"} Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.223216 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6tgfz"] Feb 02 17:09:01 crc kubenswrapper[4835]: W0202 17:09:01.246985 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf37c205_e3c2_43d3_a0df_9fb3e8629f87.slice/crio-8e294e0d376ee72e4c83f843348d54fa7f952adc3cda980280d0d69a1ee70835 WatchSource:0}: Error finding container 8e294e0d376ee72e4c83f843348d54fa7f952adc3cda980280d0d69a1ee70835: Status 404 returned error can't find the container with id 8e294e0d376ee72e4c83f843348d54fa7f952adc3cda980280d0d69a1ee70835 Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.416187 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-nhtch" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.558105 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-scripts\") pod \"08cf9281-9a97-420d-b734-735a7975dfe9\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.558384 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-config-data\") pod \"08cf9281-9a97-420d-b734-735a7975dfe9\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.558421 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-combined-ca-bundle\") pod \"08cf9281-9a97-420d-b734-735a7975dfe9\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.558503 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7lpx\" (UniqueName: \"kubernetes.io/projected/08cf9281-9a97-420d-b734-735a7975dfe9-kube-api-access-t7lpx\") pod \"08cf9281-9a97-420d-b734-735a7975dfe9\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.558576 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08cf9281-9a97-420d-b734-735a7975dfe9-logs\") pod \"08cf9281-9a97-420d-b734-735a7975dfe9\" (UID: \"08cf9281-9a97-420d-b734-735a7975dfe9\") " Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.559256 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08cf9281-9a97-420d-b734-735a7975dfe9-logs" (OuterVolumeSpecName: "logs") pod "08cf9281-9a97-420d-b734-735a7975dfe9" (UID: 
"08cf9281-9a97-420d-b734-735a7975dfe9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.577227 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08cf9281-9a97-420d-b734-735a7975dfe9-kube-api-access-t7lpx" (OuterVolumeSpecName: "kube-api-access-t7lpx") pod "08cf9281-9a97-420d-b734-735a7975dfe9" (UID: "08cf9281-9a97-420d-b734-735a7975dfe9"). InnerVolumeSpecName "kube-api-access-t7lpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.577319 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-scripts" (OuterVolumeSpecName: "scripts") pod "08cf9281-9a97-420d-b734-735a7975dfe9" (UID: "08cf9281-9a97-420d-b734-735a7975dfe9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.585502 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08cf9281-9a97-420d-b734-735a7975dfe9" (UID: "08cf9281-9a97-420d-b734-735a7975dfe9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.607073 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5958d6c764-v628z"] Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.607398 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-config-data" (OuterVolumeSpecName: "config-data") pod "08cf9281-9a97-420d-b734-735a7975dfe9" (UID: "08cf9281-9a97-420d-b734-735a7975dfe9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.661057 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.661096 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.661109 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08cf9281-9a97-420d-b734-735a7975dfe9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.661121 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7lpx\" (UniqueName: \"kubernetes.io/projected/08cf9281-9a97-420d-b734-735a7975dfe9-kube-api-access-t7lpx\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:01 crc kubenswrapper[4835]: I0202 17:09:01.661133 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08cf9281-9a97-420d-b734-735a7975dfe9-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.011535 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5958d6c764-v628z" event={"ID":"30738051-1c87-4817-9ebf-7cdf056c4a2f","Type":"ContainerStarted","Data":"6b21687bcdf10a9f8ea62b1d1352dc16b88506d6b7d32864c96a6538d1d3678f"} Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.011794 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5958d6c764-v628z" event={"ID":"30738051-1c87-4817-9ebf-7cdf056c4a2f","Type":"ContainerStarted","Data":"7c700c232dd6dc037a5d8765bff71a773ab274d52536fa93d3b20118c31c8bf0"} Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.013572 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-nhtch" event={"ID":"08cf9281-9a97-420d-b734-735a7975dfe9","Type":"ContainerDied","Data":"f595328f1d79f0e54fa9f5b66473367fa9fc808086093ab46de6bc6f1c57d15e"} Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.013616 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f595328f1d79f0e54fa9f5b66473367fa9fc808086093ab46de6bc6f1c57d15e" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.013701 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-nhtch" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.020516 4835 generic.go:334] "Generic (PLEG): container finished" podID="c4534fed-f27c-4656-b496-cec6f87d9915" containerID="31f44bb6e34fca83d5d21668588deee6a142c2ef364147899a00db58828e5e82" exitCode=0 Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.020594 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7w9z6" event={"ID":"c4534fed-f27c-4656-b496-cec6f87d9915","Type":"ContainerDied","Data":"31f44bb6e34fca83d5d21668588deee6a142c2ef364147899a00db58828e5e82"} Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.025481 4835 generic.go:334] "Generic (PLEG): container finished" podID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" containerID="6bb1a423dfda41f2f60072f6535aedf014fa8e5b5c71ac1034f3795e9529485b" exitCode=0 Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.025690 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" event={"ID":"af37c205-e3c2-43d3-a0df-9fb3e8629f87","Type":"ContainerDied","Data":"6bb1a423dfda41f2f60072f6535aedf014fa8e5b5c71ac1034f3795e9529485b"} Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.025720 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" event={"ID":"af37c205-e3c2-43d3-a0df-9fb3e8629f87","Type":"ContainerStarted","Data":"8e294e0d376ee72e4c83f843348d54fa7f952adc3cda980280d0d69a1ee70835"} Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.087531 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8b8949f68-7cjhk"] Feb 02 17:09:02 crc kubenswrapper[4835]: E0202 17:09:02.087867 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08cf9281-9a97-420d-b734-735a7975dfe9" containerName="placement-db-sync" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.087881 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="08cf9281-9a97-420d-b734-735a7975dfe9" containerName="placement-db-sync" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.088032 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="08cf9281-9a97-420d-b734-735a7975dfe9" containerName="placement-db-sync" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.088833 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.097816 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.097948 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ng24p" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.098218 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.098408 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.099126 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.106668 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8b8949f68-7cjhk"] Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.275709 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-config-data\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.275782 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-logs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.275816 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-combined-ca-bundle\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.275835 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-scripts\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.275864 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-public-tls-certs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.275884 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfxt8\" (UniqueName: \"kubernetes.io/projected/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-kube-api-access-xfxt8\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.275908 4835 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-internal-tls-certs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.377571 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-config-data\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.377971 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-logs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.378026 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-combined-ca-bundle\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.378057 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-scripts\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.378102 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-public-tls-certs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.378133 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfxt8\" (UniqueName: \"kubernetes.io/projected/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-kube-api-access-xfxt8\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.378167 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-internal-tls-certs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.383888 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-logs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.388814 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-scripts\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.389250 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-internal-tls-certs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.389835 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-combined-ca-bundle\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.390067 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-public-tls-certs\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.390896 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-config-data\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.418176 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfxt8\" (UniqueName: \"kubernetes.io/projected/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-kube-api-access-xfxt8\") pod \"placement-8b8949f68-7cjhk\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.438731 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.439880 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-bgpns" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.581365 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-combined-ca-bundle\") pod \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.581548 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-db-sync-config-data\") pod \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.581619 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9gxp\" (UniqueName: \"kubernetes.io/projected/6bbf2f76-2a57-4df0-989c-3a55710ef86c-kube-api-access-t9gxp\") pod \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\" (UID: \"6bbf2f76-2a57-4df0-989c-3a55710ef86c\") " Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.588917 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bbf2f76-2a57-4df0-989c-3a55710ef86c-kube-api-access-t9gxp" (OuterVolumeSpecName: "kube-api-access-t9gxp") pod "6bbf2f76-2a57-4df0-989c-3a55710ef86c" (UID: "6bbf2f76-2a57-4df0-989c-3a55710ef86c"). InnerVolumeSpecName "kube-api-access-t9gxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.589805 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6bbf2f76-2a57-4df0-989c-3a55710ef86c" (UID: "6bbf2f76-2a57-4df0-989c-3a55710ef86c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.667645 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6bbf2f76-2a57-4df0-989c-3a55710ef86c" (UID: "6bbf2f76-2a57-4df0-989c-3a55710ef86c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.683153 4835 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.683185 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9gxp\" (UniqueName: \"kubernetes.io/projected/6bbf2f76-2a57-4df0-989c-3a55710ef86c-kube-api-access-t9gxp\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.683195 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bbf2f76-2a57-4df0-989c-3a55710ef86c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.788779 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7874ff7b65-42jrg"] Feb 02 17:09:02 crc kubenswrapper[4835]: E0202 17:09:02.789171 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bbf2f76-2a57-4df0-989c-3a55710ef86c" containerName="barbican-db-sync" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.789188 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bbf2f76-2a57-4df0-989c-3a55710ef86c" containerName="barbican-db-sync" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.789379 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bbf2f76-2a57-4df0-989c-3a55710ef86c" containerName="barbican-db-sync" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.790135 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.797238 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.802563 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.803505 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7874ff7b65-42jrg"] Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.890263 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-ovndb-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.890328 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-config\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.890360 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-public-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 
17:09:02.890422 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bk2c\" (UniqueName: \"kubernetes.io/projected/8b10dcfd-03a0-478a-87c1-f6d87260571a-kube-api-access-6bk2c\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.890550 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-combined-ca-bundle\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.890610 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-httpd-config\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.890729 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-internal-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.931904 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8b8949f68-7cjhk"] Feb 02 17:09:02 crc kubenswrapper[4835]: W0202 17:09:02.943003 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66823b3_c9eb_44bd_a7cc_56c1b5f780c1.slice/crio-9dbf5fb258595105f66d61759b0724dba175cca3e2708733be95c5cf180e92a1 WatchSource:0}: Error finding container 9dbf5fb258595105f66d61759b0724dba175cca3e2708733be95c5cf180e92a1: Status 404 returned error can't find the container with id 9dbf5fb258595105f66d61759b0724dba175cca3e2708733be95c5cf180e92a1 Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.992123 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-internal-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.992215 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-ovndb-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.992264 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-config\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.992315 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-public-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.992386 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bk2c\" (UniqueName: \"kubernetes.io/projected/8b10dcfd-03a0-478a-87c1-f6d87260571a-kube-api-access-6bk2c\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.992415 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-combined-ca-bundle\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.992436 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-httpd-config\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:02 crc kubenswrapper[4835]: I0202 17:09:02.996004 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-internal-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.001232 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-config\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.001699 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-ovndb-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.002054 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-httpd-config\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.013109 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-public-tls-certs\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.014166 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bk2c\" (UniqueName: \"kubernetes.io/projected/8b10dcfd-03a0-478a-87c1-f6d87260571a-kube-api-access-6bk2c\") pod \"neutron-7874ff7b65-42jrg\" (UID: 
\"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.026754 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-combined-ca-bundle\") pod \"neutron-7874ff7b65-42jrg\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.060024 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" event={"ID":"af37c205-e3c2-43d3-a0df-9fb3e8629f87","Type":"ContainerStarted","Data":"d6242456075e18d217ec38a5e9eb69e8241a701fc7240e4df1dbd643e84fbace"} Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.061365 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.081114 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5958d6c764-v628z" event={"ID":"30738051-1c87-4817-9ebf-7cdf056c4a2f","Type":"ContainerStarted","Data":"589b6742a0cb8137f5eb8e4a1452ff1c498c2b58e3763c1dbe00f0cf2b690c8e"} Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.081586 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.085212 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b8949f68-7cjhk" event={"ID":"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1","Type":"ContainerStarted","Data":"9dbf5fb258595105f66d61759b0724dba175cca3e2708733be95c5cf180e92a1"} Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.091376 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-bgpns" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.102803 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-bgpns" event={"ID":"6bbf2f76-2a57-4df0-989c-3a55710ef86c","Type":"ContainerDied","Data":"1b37dbd1905646cb0e6f0a749dbc55874d37542c47d0e5314ac17e1ecb11c9c7"} Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.102883 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b37dbd1905646cb0e6f0a749dbc55874d37542c47d0e5314ac17e1ecb11c9c7" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.108944 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" podStartSLOduration=3.108917579 podStartE2EDuration="3.108917579s" podCreationTimestamp="2026-02-02 17:09:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:03.102150148 +0000 UTC m=+1134.723754238" watchObservedRunningTime="2026-02-02 17:09:03.108917579 +0000 UTC m=+1134.730521659" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.139960 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.320318 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5958d6c764-v628z" podStartSLOduration=3.320298151 podStartE2EDuration="3.320298151s" podCreationTimestamp="2026-02-02 17:09:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:03.200612649 +0000 UTC m=+1134.822216729" watchObservedRunningTime="2026-02-02 17:09:03.320298151 +0000 UTC m=+1134.941902231" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.325226 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-547cf5b9f-w4hcp"] Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.334437 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.348417 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-547cf5b9f-w4hcp"] Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.355595 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.355762 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.361782 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-qn5jg" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.385570 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-697bf5f454-lm7b8"] Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.388522 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.394071 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.416248 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-697bf5f454-lm7b8"] Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509231 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-combined-ca-bundle\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509308 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data-custom\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509334 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg4h9\" (UniqueName: \"kubernetes.io/projected/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-kube-api-access-rg4h9\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509366 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509394 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-logs\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509418 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18088c0c-a5e2-4721-a488-8970de4a6277-logs\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509435 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7mcl\" (UniqueName: \"kubernetes.io/projected/18088c0c-a5e2-4721-a488-8970de4a6277-kube-api-access-c7mcl\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509461 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509479 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-combined-ca-bundle\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.509516 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data-custom\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:03 crc kubenswrapper[4835]: I0202 17:09:03.538511 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6tgfz"] Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.617712 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-869f779d85-mgjw6"] Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.624904 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.634622 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-mgjw6"] Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641594 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data-custom\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641658 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-config\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641690 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-combined-ca-bundle\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641724 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data-custom\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641748 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rg4h9\" (UniqueName: \"kubernetes.io/projected/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-kube-api-access-rg4h9\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641782 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641802 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641818 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641837 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-dns-svc\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641863 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-logs\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641889 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18088c0c-a5e2-4721-a488-8970de4a6277-logs\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641906 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw4ms\" (UniqueName: \"kubernetes.io/projected/9768010f-5f61-48d8-883c-d6cf020cfdf1-kube-api-access-fw4ms\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641924 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7mcl\" (UniqueName: \"kubernetes.io/projected/18088c0c-a5e2-4721-a488-8970de4a6277-kube-api-access-c7mcl\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 
17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641953 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.641973 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-combined-ca-bundle\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.647835 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.648399 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-combined-ca-bundle\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.649184 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18088c0c-a5e2-4721-a488-8970de4a6277-logs\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.649542 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-logs\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.651370 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data-custom\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.652176 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data-custom\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.654834 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-combined-ca-bundle\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " 
pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.658756 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.667892 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5df75c588d-8mgh7"] Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.678416 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.680861 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.689885 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7mcl\" (UniqueName: \"kubernetes.io/projected/18088c0c-a5e2-4721-a488-8970de4a6277-kube-api-access-c7mcl\") pod \"barbican-worker-547cf5b9f-w4hcp\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.702834 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5df75c588d-8mgh7"] Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.709115 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg4h9\" (UniqueName: \"kubernetes.io/projected/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-kube-api-access-rg4h9\") pod \"barbican-keystone-listener-697bf5f454-lm7b8\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.730598 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.746132 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.746175 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.746195 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.746213 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-dns-svc\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.746299 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g898p\" (UniqueName: \"kubernetes.io/projected/2f10a70c-3a54-43cc-8848-cfe8e14cea67-kube-api-access-g898p\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.746324 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw4ms\" (UniqueName: \"kubernetes.io/projected/9768010f-5f61-48d8-883c-d6cf020cfdf1-kube-api-access-fw4ms\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.747914 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.747995 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-combined-ca-bundle\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.748048 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data-custom\") pod 
\"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.748075 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f10a70c-3a54-43cc-8848-cfe8e14cea67-logs\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.748107 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-config\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.748642 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.749887 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-dns-svc\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.752005 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-config\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.767958 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw4ms\" (UniqueName: \"kubernetes.io/projected/9768010f-5f61-48d8-883c-d6cf020cfdf1-kube-api-access-fw4ms\") pod \"dnsmasq-dns-869f779d85-mgjw6\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.849499 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g898p\" (UniqueName: \"kubernetes.io/projected/2f10a70c-3a54-43cc-8848-cfe8e14cea67-kube-api-access-g898p\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.850017 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-combined-ca-bundle\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.850073 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data-custom\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " 
pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.850102 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f10a70c-3a54-43cc-8848-cfe8e14cea67-logs\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.850196 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.850847 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f10a70c-3a54-43cc-8848-cfe8e14cea67-logs\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.855309 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-combined-ca-bundle\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.855783 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data-custom\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.859235 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.866865 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g898p\" (UniqueName: \"kubernetes.io/projected/2f10a70c-3a54-43cc-8848-cfe8e14cea67-kube-api-access-g898p\") pod \"barbican-api-5df75c588d-8mgh7\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:03.968092 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:04.058331 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:04 crc kubenswrapper[4835]: I0202 17:09:04.067794 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.115130 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" podUID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" containerName="dnsmasq-dns" containerID="cri-o://d6242456075e18d217ec38a5e9eb69e8241a701fc7240e4df1dbd643e84fbace" gracePeriod=10 Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.627657 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.789478 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxblx\" (UniqueName: \"kubernetes.io/projected/c4534fed-f27c-4656-b496-cec6f87d9915-kube-api-access-wxblx\") pod \"c4534fed-f27c-4656-b496-cec6f87d9915\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.789551 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-fernet-keys\") pod \"c4534fed-f27c-4656-b496-cec6f87d9915\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.789607 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-scripts\") pod \"c4534fed-f27c-4656-b496-cec6f87d9915\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.789629 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-credential-keys\") pod \"c4534fed-f27c-4656-b496-cec6f87d9915\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.789812 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-config-data\") pod \"c4534fed-f27c-4656-b496-cec6f87d9915\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.789860 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-combined-ca-bundle\") pod \"c4534fed-f27c-4656-b496-cec6f87d9915\" (UID: \"c4534fed-f27c-4656-b496-cec6f87d9915\") " Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.796491 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c4534fed-f27c-4656-b496-cec6f87d9915" (UID: "c4534fed-f27c-4656-b496-cec6f87d9915"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.801345 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c4534fed-f27c-4656-b496-cec6f87d9915" (UID: "c4534fed-f27c-4656-b496-cec6f87d9915"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.801464 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4534fed-f27c-4656-b496-cec6f87d9915-kube-api-access-wxblx" (OuterVolumeSpecName: "kube-api-access-wxblx") pod "c4534fed-f27c-4656-b496-cec6f87d9915" (UID: "c4534fed-f27c-4656-b496-cec6f87d9915"). InnerVolumeSpecName "kube-api-access-wxblx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.803349 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-scripts" (OuterVolumeSpecName: "scripts") pod "c4534fed-f27c-4656-b496-cec6f87d9915" (UID: "c4534fed-f27c-4656-b496-cec6f87d9915"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.844813 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4534fed-f27c-4656-b496-cec6f87d9915" (UID: "c4534fed-f27c-4656-b496-cec6f87d9915"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.858136 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-config-data" (OuterVolumeSpecName: "config-data") pod "c4534fed-f27c-4656-b496-cec6f87d9915" (UID: "c4534fed-f27c-4656-b496-cec6f87d9915"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.892159 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.892449 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.892461 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxblx\" (UniqueName: \"kubernetes.io/projected/c4534fed-f27c-4656-b496-cec6f87d9915-kube-api-access-wxblx\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.892470 4835 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.892479 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:05 crc kubenswrapper[4835]: I0202 17:09:05.892487 4835 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c4534fed-f27c-4656-b496-cec6f87d9915-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.127674 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-7w9z6" event={"ID":"c4534fed-f27c-4656-b496-cec6f87d9915","Type":"ContainerDied","Data":"72ce18f597f5062813f011bb51a1dddd1241e80d86667bd26b09178de4c41524"} Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.127707 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7w9z6" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.127717 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72ce18f597f5062813f011bb51a1dddd1241e80d86667bd26b09178de4c41524" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.129996 4835 generic.go:334] "Generic (PLEG): container finished" podID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" containerID="d6242456075e18d217ec38a5e9eb69e8241a701fc7240e4df1dbd643e84fbace" exitCode=0 Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.130034 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" event={"ID":"af37c205-e3c2-43d3-a0df-9fb3e8629f87","Type":"ContainerDied","Data":"d6242456075e18d217ec38a5e9eb69e8241a701fc7240e4df1dbd643e84fbace"} Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.525664 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-97d9cb6c4-nx6lx"] Feb 02 17:09:06 crc kubenswrapper[4835]: E0202 17:09:06.526399 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4534fed-f27c-4656-b496-cec6f87d9915" containerName="keystone-bootstrap" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.526468 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4534fed-f27c-4656-b496-cec6f87d9915" containerName="keystone-bootstrap" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.526686 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4534fed-f27c-4656-b496-cec6f87d9915" containerName="keystone-bootstrap" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.527707 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.530528 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.530788 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.540669 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-97d9cb6c4-nx6lx"] Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.709355 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data-custom\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.709428 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-logs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.709470 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.709492 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-combined-ca-bundle\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.709528 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnn5f\" (UniqueName: \"kubernetes.io/projected/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-kube-api-access-pnn5f\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.709574 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-public-tls-certs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.709639 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-internal-tls-certs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.724772 4835 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/keystone-577d94f4db-mdlkk"] Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.729101 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.734039 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.734201 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.734264 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.734359 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cnmxn" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.734596 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.735136 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.748318 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-577d94f4db-mdlkk"] Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.810799 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data-custom\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.810859 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-logs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.810897 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.810919 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-combined-ca-bundle\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.810955 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnn5f\" (UniqueName: \"kubernetes.io/projected/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-kube-api-access-pnn5f\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.811007 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-public-tls-certs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.811061 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-internal-tls-certs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.811342 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-logs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.815587 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-combined-ca-bundle\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.815993 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.816742 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data-custom\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.818745 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-public-tls-certs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.828969 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-internal-tls-certs\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.831232 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnn5f\" (UniqueName: \"kubernetes.io/projected/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-kube-api-access-pnn5f\") pod \"barbican-api-97d9cb6c4-nx6lx\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.854150 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.912486 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-config-data\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.912543 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-scripts\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.912572 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-fernet-keys\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.912605 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-internal-tls-certs\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.912626 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7678x\" (UniqueName: \"kubernetes.io/projected/0a44fa56-f689-4268-9973-867224dc13ef-kube-api-access-7678x\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.912646 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-public-tls-certs\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.912663 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-credential-keys\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:06 crc kubenswrapper[4835]: I0202 17:09:06.912691 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-combined-ca-bundle\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.014168 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-credential-keys\") pod 
\"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.014223 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-combined-ca-bundle\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.014337 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-config-data\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.014368 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-scripts\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.014394 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-fernet-keys\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.014431 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-internal-tls-certs\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.014488 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7678x\" (UniqueName: \"kubernetes.io/projected/0a44fa56-f689-4268-9973-867224dc13ef-kube-api-access-7678x\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.014518 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-public-tls-certs\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.023984 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-credential-keys\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.024134 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-combined-ca-bundle\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 
17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.024288 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-fernet-keys\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.024322 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-internal-tls-certs\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.024477 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-config-data\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.024573 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-scripts\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.024835 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44fa56-f689-4268-9973-867224dc13ef-public-tls-certs\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.032112 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7678x\" (UniqueName: \"kubernetes.io/projected/0a44fa56-f689-4268-9973-867224dc13ef-kube-api-access-7678x\") pod \"keystone-577d94f4db-mdlkk\" (UID: \"0a44fa56-f689-4268-9973-867224dc13ef\") " pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:07 crc kubenswrapper[4835]: I0202 17:09:07.043622 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.156146 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" event={"ID":"af37c205-e3c2-43d3-a0df-9fb3e8629f87","Type":"ContainerDied","Data":"8e294e0d376ee72e4c83f843348d54fa7f952adc3cda980280d0d69a1ee70835"} Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.156380 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e294e0d376ee72e4c83f843348d54fa7f952adc3cda980280d0d69a1ee70835" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.282356 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.446102 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-dns-svc\") pod \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.446502 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-sb\") pod \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.446524 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-config\") pod \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.447171 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqzk7\" (UniqueName: \"kubernetes.io/projected/af37c205-e3c2-43d3-a0df-9fb3e8629f87-kube-api-access-wqzk7\") pod \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.447203 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-nb\") pod \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\" (UID: \"af37c205-e3c2-43d3-a0df-9fb3e8629f87\") " Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.454049 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af37c205-e3c2-43d3-a0df-9fb3e8629f87-kube-api-access-wqzk7" (OuterVolumeSpecName: "kube-api-access-wqzk7") pod "af37c205-e3c2-43d3-a0df-9fb3e8629f87" (UID: "af37c205-e3c2-43d3-a0df-9fb3e8629f87"). InnerVolumeSpecName "kube-api-access-wqzk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.499391 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-config" (OuterVolumeSpecName: "config") pod "af37c205-e3c2-43d3-a0df-9fb3e8629f87" (UID: "af37c205-e3c2-43d3-a0df-9fb3e8629f87"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.503358 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "af37c205-e3c2-43d3-a0df-9fb3e8629f87" (UID: "af37c205-e3c2-43d3-a0df-9fb3e8629f87"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.506358 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "af37c205-e3c2-43d3-a0df-9fb3e8629f87" (UID: "af37c205-e3c2-43d3-a0df-9fb3e8629f87"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.507246 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "af37c205-e3c2-43d3-a0df-9fb3e8629f87" (UID: "af37c205-e3c2-43d3-a0df-9fb3e8629f87"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.549680 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.550115 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.550168 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqzk7\" (UniqueName: \"kubernetes.io/projected/af37c205-e3c2-43d3-a0df-9fb3e8629f87-kube-api-access-wqzk7\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.550184 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.550194 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af37c205-e3c2-43d3-a0df-9fb3e8629f87-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.616494 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-547cf5b9f-w4hcp"] Feb 02 17:09:08 crc kubenswrapper[4835]: I0202 17:09:08.999296 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5df75c588d-8mgh7"] Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.012997 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-697bf5f454-lm7b8"] Feb 02 17:09:09 crc kubenswrapper[4835]: W0202 17:09:09.021786 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e3c1b42_af9e_4c9d_9808_dcf1856a9bff.slice/crio-57849e81f2534bb1f436f9a1124595aa125a1e4a1538de45987e10f2a4ad26ff WatchSource:0}: Error finding container 57849e81f2534bb1f436f9a1124595aa125a1e4a1538de45987e10f2a4ad26ff: Status 404 returned error can't find the container with id 57849e81f2534bb1f436f9a1124595aa125a1e4a1538de45987e10f2a4ad26ff Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.021833 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-mgjw6"] Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.041218 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-577d94f4db-mdlkk"] Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.052154 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-97d9cb6c4-nx6lx"] Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.171821 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7874ff7b65-42jrg"] Feb 02 17:09:09 crc 
kubenswrapper[4835]: I0202 17:09:09.204694 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerStarted","Data":"acc386e87e7c57d82b5630dca097c063071de98c2a0ba0d468e6f8a9b602c136"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.204881 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b8949f68-7cjhk" event={"ID":"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1","Type":"ContainerStarted","Data":"bf026e2f58b5c048bc4a83a5c3443dbb83c7388425a0dc6535e30501cd629092"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.204947 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b8949f68-7cjhk" event={"ID":"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1","Type":"ContainerStarted","Data":"a27adc2355489323da3b7415a4f32584902e7314cd17c2bd21a4576a55b5e317"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.205019 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df75c588d-8mgh7" event={"ID":"2f10a70c-3a54-43cc-8848-cfe8e14cea67","Type":"ContainerStarted","Data":"d90a1f320e20b84e9152198f478559b85e0289d88b6b9138e7f4391603cd8f80"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.205092 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-547cf5b9f-w4hcp" event={"ID":"18088c0c-a5e2-4721-a488-8970de4a6277","Type":"ContainerStarted","Data":"0ee8a01e8e6be7ba81872762857b8b46f95bcb3e7e23e80c7e1e7117f46255ed"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.205154 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" event={"ID":"9768010f-5f61-48d8-883c-d6cf020cfdf1","Type":"ContainerStarted","Data":"0b6563c7f2bc3d47239c9e8a55a9efda7b2a15491dc286cb634c6bbf8d67e2f8"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.209533 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" event={"ID":"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff","Type":"ContainerStarted","Data":"57849e81f2534bb1f436f9a1124595aa125a1e4a1538de45987e10f2a4ad26ff"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.211062 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7874ff7b65-42jrg" event={"ID":"8b10dcfd-03a0-478a-87c1-f6d87260571a","Type":"ContainerStarted","Data":"b6136af9e8adaf5b17a0b52c9ea1c82b602fc3de08dc4a78cb529512c3292f52"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.217909 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-577d94f4db-mdlkk" event={"ID":"0a44fa56-f689-4268-9973-867224dc13ef","Type":"ContainerStarted","Data":"25e96db588582442a9c94b055d1213a70883b4337dd885bde8a44c7fa41c0b96"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.225480 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-97d9cb6c4-nx6lx" event={"ID":"b4cf63eb-ca5d-4d27-8aff-b659a19a7938","Type":"ContainerStarted","Data":"b8b61221be9f92cbcdd4971510eb97d4a53198f461b79ae1e52e12ffae215b1d"} Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.225579 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-6tgfz" Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.514731 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6tgfz"] Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.536188 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6tgfz"] Feb 02 17:09:09 crc kubenswrapper[4835]: I0202 17:09:09.543644 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-8b8949f68-7cjhk" podStartSLOduration=7.543622578 podStartE2EDuration="7.543622578s" podCreationTimestamp="2026-02-02 17:09:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:09.475425551 +0000 UTC m=+1141.097029631" watchObservedRunningTime="2026-02-02 17:09:09.543622578 +0000 UTC m=+1141.165226668" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.251112 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7874ff7b65-42jrg" event={"ID":"8b10dcfd-03a0-478a-87c1-f6d87260571a","Type":"ContainerStarted","Data":"e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4"} Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.252770 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.252866 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7874ff7b65-42jrg" event={"ID":"8b10dcfd-03a0-478a-87c1-f6d87260571a","Type":"ContainerStarted","Data":"5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5"} Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.254344 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-577d94f4db-mdlkk" event={"ID":"0a44fa56-f689-4268-9973-867224dc13ef","Type":"ContainerStarted","Data":"7b02ba10385408a8981389673b39683c8d73eb77a25cde24a0dbab95d5731814"} Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.255174 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.259621 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-97d9cb6c4-nx6lx" event={"ID":"b4cf63eb-ca5d-4d27-8aff-b659a19a7938","Type":"ContainerStarted","Data":"7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9"} Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.259665 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-97d9cb6c4-nx6lx" event={"ID":"b4cf63eb-ca5d-4d27-8aff-b659a19a7938","Type":"ContainerStarted","Data":"e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173"} Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.259726 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.260308 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.264686 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df75c588d-8mgh7" event={"ID":"2f10a70c-3a54-43cc-8848-cfe8e14cea67","Type":"ContainerStarted","Data":"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59"} Feb 02 
17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.265098 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df75c588d-8mgh7" event={"ID":"2f10a70c-3a54-43cc-8848-cfe8e14cea67","Type":"ContainerStarted","Data":"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e"} Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.265775 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.266318 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.279103 4835 generic.go:334] "Generic (PLEG): container finished" podID="9768010f-5f61-48d8-883c-d6cf020cfdf1" containerID="e260eb78d58f6b9ffabef4d55a64e5a25831db6f1321808ab6d38ec936c752e4" exitCode=0 Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.279710 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" event={"ID":"9768010f-5f61-48d8-883c-d6cf020cfdf1","Type":"ContainerDied","Data":"e260eb78d58f6b9ffabef4d55a64e5a25831db6f1321808ab6d38ec936c752e4"} Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.280611 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.280700 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.280925 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7874ff7b65-42jrg" podStartSLOduration=8.280908979 podStartE2EDuration="8.280908979s" podCreationTimestamp="2026-02-02 17:09:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:10.275458244 +0000 UTC m=+1141.897062324" watchObservedRunningTime="2026-02-02 17:09:10.280908979 +0000 UTC m=+1141.902513059" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.306010 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-577d94f4db-mdlkk" podStartSLOduration=4.305993157 podStartE2EDuration="4.305993157s" podCreationTimestamp="2026-02-02 17:09:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:10.298005282 +0000 UTC m=+1141.919609382" watchObservedRunningTime="2026-02-02 17:09:10.305993157 +0000 UTC m=+1141.927597237" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.323657 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5df75c588d-8mgh7" podStartSLOduration=7.323635886 podStartE2EDuration="7.323635886s" podCreationTimestamp="2026-02-02 17:09:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:10.316220396 +0000 UTC m=+1141.937824476" watchObservedRunningTime="2026-02-02 17:09:10.323635886 +0000 UTC m=+1141.945239966" Feb 02 17:09:10 crc kubenswrapper[4835]: I0202 17:09:10.336816 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-97d9cb6c4-nx6lx" podStartSLOduration=4.336797658 podStartE2EDuration="4.336797658s" 
podCreationTimestamp="2026-02-02 17:09:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:10.334524673 +0000 UTC m=+1141.956128753" watchObservedRunningTime="2026-02-02 17:09:10.336797658 +0000 UTC m=+1141.958401738" Feb 02 17:09:11 crc kubenswrapper[4835]: I0202 17:09:11.199246 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" path="/var/lib/kubelet/pods/af37c205-e3c2-43d3-a0df-9fb3e8629f87/volumes" Feb 02 17:09:12 crc kubenswrapper[4835]: I0202 17:09:12.300707 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" event={"ID":"9768010f-5f61-48d8-883c-d6cf020cfdf1","Type":"ContainerStarted","Data":"7b38d71a3ca0880539e9d959d382a16d05282e549eca3e253c2c7a557c06891e"} Feb 02 17:09:12 crc kubenswrapper[4835]: I0202 17:09:12.301099 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:12 crc kubenswrapper[4835]: I0202 17:09:12.302749 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" event={"ID":"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff","Type":"ContainerStarted","Data":"db32698244b403f3159dd08c33790fdba9e849c48d251fb79c863370592f1c1d"} Feb 02 17:09:12 crc kubenswrapper[4835]: I0202 17:09:12.307070 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-547cf5b9f-w4hcp" event={"ID":"18088c0c-a5e2-4721-a488-8970de4a6277","Type":"ContainerStarted","Data":"ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482"} Feb 02 17:09:12 crc kubenswrapper[4835]: I0202 17:09:12.326340 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" podStartSLOduration=9.326321551 podStartE2EDuration="9.326321551s" podCreationTimestamp="2026-02-02 17:09:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:12.321487474 +0000 UTC m=+1143.943091554" watchObservedRunningTime="2026-02-02 17:09:12.326321551 +0000 UTC m=+1143.947925641" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.325771 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-65fpg" event={"ID":"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9","Type":"ContainerStarted","Data":"3d26f0bce92def978696edee263ee01d46d72d4c212ae7facbd778e8bab9df30"} Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.331954 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-547cf5b9f-w4hcp" event={"ID":"18088c0c-a5e2-4721-a488-8970de4a6277","Type":"ContainerStarted","Data":"a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369"} Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.334715 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" event={"ID":"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff","Type":"ContainerStarted","Data":"5a1e38a06614af2e323519b8e1a56f6a44a79d72dfffdb14c05ec5d07590271b"} Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.349390 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-65fpg" podStartSLOduration=3.454566548 podStartE2EDuration="40.349367247s" podCreationTimestamp="2026-02-02 17:08:33 +0000 UTC" 
firstStartedPulling="2026-02-02 17:08:35.279403534 +0000 UTC m=+1106.901007604" lastFinishedPulling="2026-02-02 17:09:12.174204223 +0000 UTC m=+1143.795808303" observedRunningTime="2026-02-02 17:09:13.345630411 +0000 UTC m=+1144.967234501" watchObservedRunningTime="2026-02-02 17:09:13.349367247 +0000 UTC m=+1144.970971337" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.384722 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-547cf5b9f-w4hcp" podStartSLOduration=8.035904022 podStartE2EDuration="10.384698215s" podCreationTimestamp="2026-02-02 17:09:03 +0000 UTC" firstStartedPulling="2026-02-02 17:09:08.615039651 +0000 UTC m=+1140.236643731" lastFinishedPulling="2026-02-02 17:09:10.963833844 +0000 UTC m=+1142.585437924" observedRunningTime="2026-02-02 17:09:13.374880687 +0000 UTC m=+1144.996484777" watchObservedRunningTime="2026-02-02 17:09:13.384698215 +0000 UTC m=+1145.006302295" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.509199 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" podStartSLOduration=8.568973923 podStartE2EDuration="10.509177172s" podCreationTimestamp="2026-02-02 17:09:03 +0000 UTC" firstStartedPulling="2026-02-02 17:09:09.024703396 +0000 UTC m=+1140.646307476" lastFinishedPulling="2026-02-02 17:09:10.964906645 +0000 UTC m=+1142.586510725" observedRunningTime="2026-02-02 17:09:13.401659024 +0000 UTC m=+1145.023263124" watchObservedRunningTime="2026-02-02 17:09:13.509177172 +0000 UTC m=+1145.130781262" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.509431 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5c47cddbff-wsm2t"] Feb 02 17:09:13 crc kubenswrapper[4835]: E0202 17:09:13.510032 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" containerName="dnsmasq-dns" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.510051 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" containerName="dnsmasq-dns" Feb 02 17:09:13 crc kubenswrapper[4835]: E0202 17:09:13.510072 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" containerName="init" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.510080 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" containerName="init" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.510343 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="af37c205-e3c2-43d3-a0df-9fb3e8629f87" containerName="dnsmasq-dns" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.511743 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.527491 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5c47cddbff-wsm2t"] Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.537290 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6f74b59756-mvv58"] Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.550543 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562115 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-combined-ca-bundle\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562172 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cvh9\" (UniqueName: \"kubernetes.io/projected/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-kube-api-access-6cvh9\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562195 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-combined-ca-bundle\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562219 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-config-data\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562296 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhdnd\" (UniqueName: \"kubernetes.io/projected/79900da5-f2b8-4e39-8a30-feefcfec5a04-kube-api-access-fhdnd\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562324 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-config-data\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562343 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79900da5-f2b8-4e39-8a30-feefcfec5a04-logs\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562374 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-config-data-custom\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562406 4835 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-logs\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.562421 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-config-data-custom\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.578894 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6f74b59756-mvv58"] Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.643391 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5df75c588d-8mgh7"] Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.643603 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5df75c588d-8mgh7" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api-log" containerID="cri-o://db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e" gracePeriod=30 Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.643699 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5df75c588d-8mgh7" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api" containerID="cri-o://e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59" gracePeriod=30 Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666113 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-config-data\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666163 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79900da5-f2b8-4e39-8a30-feefcfec5a04-logs\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666196 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-config-data-custom\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666231 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-logs\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666251 4835 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-config-data-custom\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666284 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-combined-ca-bundle\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666315 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cvh9\" (UniqueName: \"kubernetes.io/projected/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-kube-api-access-6cvh9\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666344 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-combined-ca-bundle\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666367 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-config-data\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.666424 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhdnd\" (UniqueName: \"kubernetes.io/projected/79900da5-f2b8-4e39-8a30-feefcfec5a04-kube-api-access-fhdnd\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.669710 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79900da5-f2b8-4e39-8a30-feefcfec5a04-logs\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.669794 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-logs\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.672406 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-config-data-custom\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" 
Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.679039 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-combined-ca-bundle\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.680732 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-config-data\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.682176 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-config-data\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.685829 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-788b5b9b58-9wmkc"] Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.688886 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79900da5-f2b8-4e39-8a30-feefcfec5a04-config-data-custom\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.693123 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhdnd\" (UniqueName: \"kubernetes.io/projected/79900da5-f2b8-4e39-8a30-feefcfec5a04-kube-api-access-fhdnd\") pod \"barbican-worker-5c47cddbff-wsm2t\" (UID: \"79900da5-f2b8-4e39-8a30-feefcfec5a04\") " pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.696924 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cvh9\" (UniqueName: \"kubernetes.io/projected/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-kube-api-access-6cvh9\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.701056 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-788b5b9b58-9wmkc"] Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.701211 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.715168 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fd3f27-2fa7-4a00-8389-97ac4ce31e33-combined-ca-bundle\") pod \"barbican-keystone-listener-6f74b59756-mvv58\" (UID: \"34fd3f27-2fa7-4a00-8389-97ac4ce31e33\") " pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.771760 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-public-tls-certs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.771835 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7466e48d-b9d4-4a34-917c-5ddd649eaac9-logs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.771887 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-config-data\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.771947 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-internal-tls-certs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.771970 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q7t8\" (UniqueName: \"kubernetes.io/projected/7466e48d-b9d4-4a34-917c-5ddd649eaac9-kube-api-access-5q7t8\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.772006 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-combined-ca-bundle\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.772043 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-config-data-custom\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.851937 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5c47cddbff-wsm2t" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.875183 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-public-tls-certs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.875308 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7466e48d-b9d4-4a34-917c-5ddd649eaac9-logs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.875403 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-config-data\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.875706 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-internal-tls-certs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.875734 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q7t8\" (UniqueName: \"kubernetes.io/projected/7466e48d-b9d4-4a34-917c-5ddd649eaac9-kube-api-access-5q7t8\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.875834 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-combined-ca-bundle\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.876779 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-config-data-custom\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.876558 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7466e48d-b9d4-4a34-917c-5ddd649eaac9-logs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.883868 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-public-tls-certs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " 
pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.883933 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-internal-tls-certs\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.884536 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-config-data-custom\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.887164 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-combined-ca-bundle\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.887619 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7466e48d-b9d4-4a34-917c-5ddd649eaac9-config-data\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.887966 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" Feb 02 17:09:13 crc kubenswrapper[4835]: I0202 17:09:13.911169 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q7t8\" (UniqueName: \"kubernetes.io/projected/7466e48d-b9d4-4a34-917c-5ddd649eaac9-kube-api-access-5q7t8\") pod \"barbican-api-788b5b9b58-9wmkc\" (UID: \"7466e48d-b9d4-4a34-917c-5ddd649eaac9\") " pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.059472 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.259832 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.287792 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g898p\" (UniqueName: \"kubernetes.io/projected/2f10a70c-3a54-43cc-8848-cfe8e14cea67-kube-api-access-g898p\") pod \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.287858 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data-custom\") pod \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.287963 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f10a70c-3a54-43cc-8848-cfe8e14cea67-logs\") pod \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.288031 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data\") pod \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.288051 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-combined-ca-bundle\") pod \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\" (UID: \"2f10a70c-3a54-43cc-8848-cfe8e14cea67\") " Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.290590 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f10a70c-3a54-43cc-8848-cfe8e14cea67-logs" (OuterVolumeSpecName: "logs") pod "2f10a70c-3a54-43cc-8848-cfe8e14cea67" (UID: "2f10a70c-3a54-43cc-8848-cfe8e14cea67"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.305284 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2f10a70c-3a54-43cc-8848-cfe8e14cea67" (UID: "2f10a70c-3a54-43cc-8848-cfe8e14cea67"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.305407 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f10a70c-3a54-43cc-8848-cfe8e14cea67-kube-api-access-g898p" (OuterVolumeSpecName: "kube-api-access-g898p") pod "2f10a70c-3a54-43cc-8848-cfe8e14cea67" (UID: "2f10a70c-3a54-43cc-8848-cfe8e14cea67"). InnerVolumeSpecName "kube-api-access-g898p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.338171 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f10a70c-3a54-43cc-8848-cfe8e14cea67" (UID: "2f10a70c-3a54-43cc-8848-cfe8e14cea67"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.358229 4835 generic.go:334] "Generic (PLEG): container finished" podID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerID="e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59" exitCode=0 Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.358257 4835 generic.go:334] "Generic (PLEG): container finished" podID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerID="db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e" exitCode=143 Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.359226 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5df75c588d-8mgh7" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.359629 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df75c588d-8mgh7" event={"ID":"2f10a70c-3a54-43cc-8848-cfe8e14cea67","Type":"ContainerDied","Data":"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59"} Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.359690 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df75c588d-8mgh7" event={"ID":"2f10a70c-3a54-43cc-8848-cfe8e14cea67","Type":"ContainerDied","Data":"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e"} Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.359704 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5df75c588d-8mgh7" event={"ID":"2f10a70c-3a54-43cc-8848-cfe8e14cea67","Type":"ContainerDied","Data":"d90a1f320e20b84e9152198f478559b85e0289d88b6b9138e7f4391603cd8f80"} Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.359721 4835 scope.go:117] "RemoveContainer" containerID="e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.367235 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data" (OuterVolumeSpecName: "config-data") pod "2f10a70c-3a54-43cc-8848-cfe8e14cea67" (UID: "2f10a70c-3a54-43cc-8848-cfe8e14cea67"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.391064 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f10a70c-3a54-43cc-8848-cfe8e14cea67-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.391092 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.391103 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.391112 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g898p\" (UniqueName: \"kubernetes.io/projected/2f10a70c-3a54-43cc-8848-cfe8e14cea67-kube-api-access-g898p\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.391120 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f10a70c-3a54-43cc-8848-cfe8e14cea67-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.394394 4835 scope.go:117] "RemoveContainer" containerID="db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.416235 4835 scope.go:117] "RemoveContainer" containerID="e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59" Feb 02 17:09:14 crc kubenswrapper[4835]: E0202 17:09:14.417077 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59\": container with ID starting with e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59 not found: ID does not exist" containerID="e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.417134 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59"} err="failed to get container status \"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59\": rpc error: code = NotFound desc = could not find container \"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59\": container with ID starting with e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59 not found: ID does not exist" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.417171 4835 scope.go:117] "RemoveContainer" containerID="db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e" Feb 02 17:09:14 crc kubenswrapper[4835]: E0202 17:09:14.418167 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e\": container with ID starting with db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e not found: ID does not exist" containerID="db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.418193 4835 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e"} err="failed to get container status \"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e\": rpc error: code = NotFound desc = could not find container \"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e\": container with ID starting with db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e not found: ID does not exist" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.418209 4835 scope.go:117] "RemoveContainer" containerID="e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.418430 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59"} err="failed to get container status \"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59\": rpc error: code = NotFound desc = could not find container \"e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59\": container with ID starting with e9d934cbec8408a47da191c31fc4b07a24ac417728fdb3d66479f3c7fd941f59 not found: ID does not exist" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.418456 4835 scope.go:117] "RemoveContainer" containerID="db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.418790 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e"} err="failed to get container status \"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e\": rpc error: code = NotFound desc = could not find container \"db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e\": container with ID starting with db90166d1eab6aa9fa6fd08e894eb56717e063ebbe07e3806ad606919fa10d5e not found: ID does not exist" Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.490544 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6f74b59756-mvv58"] Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.501843 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5c47cddbff-wsm2t"] Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.688367 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-788b5b9b58-9wmkc"] Feb 02 17:09:14 crc kubenswrapper[4835]: W0202 17:09:14.721153 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7466e48d_b9d4_4a34_917c_5ddd649eaac9.slice/crio-bc48dcd9f938806edda8398344565f29a698ab2a7f722f01c9ccb0b9e7931b3b WatchSource:0}: Error finding container bc48dcd9f938806edda8398344565f29a698ab2a7f722f01c9ccb0b9e7931b3b: Status 404 returned error can't find the container with id bc48dcd9f938806edda8398344565f29a698ab2a7f722f01c9ccb0b9e7931b3b Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.810772 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5df75c588d-8mgh7"] Feb 02 17:09:14 crc kubenswrapper[4835]: I0202 17:09:14.817932 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5df75c588d-8mgh7"] Feb 02 17:09:15 crc kubenswrapper[4835]: I0202 17:09:15.214037 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" path="/var/lib/kubelet/pods/2f10a70c-3a54-43cc-8848-cfe8e14cea67/volumes" Feb 02 17:09:15 crc kubenswrapper[4835]: I0202 17:09:15.372143 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" event={"ID":"34fd3f27-2fa7-4a00-8389-97ac4ce31e33","Type":"ContainerStarted","Data":"de06e0b96c186f1ed7d855360ac4086709cf64e078bca16c5dcd259a07cfbb7a"} Feb 02 17:09:15 crc kubenswrapper[4835]: I0202 17:09:15.372195 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" event={"ID":"34fd3f27-2fa7-4a00-8389-97ac4ce31e33","Type":"ContainerStarted","Data":"03775d81145423c5b4c12bd0d7efae830d3be47d207daf335c5096035bd85546"} Feb 02 17:09:15 crc kubenswrapper[4835]: I0202 17:09:15.375104 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-788b5b9b58-9wmkc" event={"ID":"7466e48d-b9d4-4a34-917c-5ddd649eaac9","Type":"ContainerStarted","Data":"bc48dcd9f938806edda8398344565f29a698ab2a7f722f01c9ccb0b9e7931b3b"} Feb 02 17:09:15 crc kubenswrapper[4835]: I0202 17:09:15.378181 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5c47cddbff-wsm2t" event={"ID":"79900da5-f2b8-4e39-8a30-feefcfec5a04","Type":"ContainerStarted","Data":"8161e2defe671e2c4930f4b0bf0c4690f7003c540b09f965f88d9055b7d9b938"} Feb 02 17:09:15 crc kubenswrapper[4835]: I0202 17:09:15.378226 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5c47cddbff-wsm2t" event={"ID":"79900da5-f2b8-4e39-8a30-feefcfec5a04","Type":"ContainerStarted","Data":"6344b09c0b9bcefb158b6af80a5e4a662e0248925529222f337a6af211126f56"} Feb 02 17:09:18 crc kubenswrapper[4835]: I0202 17:09:18.411708 4835 generic.go:334] "Generic (PLEG): container finished" podID="7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" containerID="3d26f0bce92def978696edee263ee01d46d72d4c212ae7facbd778e8bab9df30" exitCode=0 Feb 02 17:09:18 crc kubenswrapper[4835]: I0202 17:09:18.411904 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-65fpg" event={"ID":"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9","Type":"ContainerDied","Data":"3d26f0bce92def978696edee263ee01d46d72d4c212ae7facbd778e8bab9df30"} Feb 02 17:09:18 crc kubenswrapper[4835]: I0202 17:09:18.596711 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:18 crc kubenswrapper[4835]: I0202 17:09:18.890754 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.060543 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.074093 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df75c588d-8mgh7" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.147:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.074486 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5df75c588d-8mgh7" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.147:9311/healthcheck\": 
context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.132650 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-82dzj"] Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.132920 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" podUID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerName="dnsmasq-dns" containerID="cri-o://4bde80bce095b9e71fc44a95b8656cfa56704328e0611900c35b621b3dbcda02" gracePeriod=10 Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.427791 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" event={"ID":"34fd3f27-2fa7-4a00-8389-97ac4ce31e33","Type":"ContainerStarted","Data":"167e71b7be2ccde443d80750b3a3dbdfe92a9c6f2274233c44c874ef32b904c2"} Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.446507 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5c47cddbff-wsm2t" event={"ID":"79900da5-f2b8-4e39-8a30-feefcfec5a04","Type":"ContainerStarted","Data":"fc4696ea535cbf26abfa86d05ea142dea36901122b2dfd02cf512d294e8aca52"} Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.448584 4835 generic.go:334] "Generic (PLEG): container finished" podID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerID="4bde80bce095b9e71fc44a95b8656cfa56704328e0611900c35b621b3dbcda02" exitCode=0 Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.448841 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" event={"ID":"eb0dd549-94f1-45b7-85c6-96039b500f39","Type":"ContainerDied","Data":"4bde80bce095b9e71fc44a95b8656cfa56704328e0611900c35b621b3dbcda02"} Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.457166 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6f74b59756-mvv58" podStartSLOduration=6.457144968 podStartE2EDuration="6.457144968s" podCreationTimestamp="2026-02-02 17:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:19.453939767 +0000 UTC m=+1151.075543867" watchObservedRunningTime="2026-02-02 17:09:19.457144968 +0000 UTC m=+1151.078749048" Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.502219 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5c47cddbff-wsm2t" podStartSLOduration=6.50219581 podStartE2EDuration="6.50219581s" podCreationTimestamp="2026-02-02 17:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:19.487069983 +0000 UTC m=+1151.108674063" watchObservedRunningTime="2026-02-02 17:09:19.50219581 +0000 UTC m=+1151.123799890" Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.503448 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-697bf5f454-lm7b8"] Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.503738 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerName="barbican-keystone-listener-log" containerID="cri-o://db32698244b403f3159dd08c33790fdba9e849c48d251fb79c863370592f1c1d" gracePeriod=30 Feb 02 
17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.504141 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerName="barbican-keystone-listener" containerID="cri-o://5a1e38a06614af2e323519b8e1a56f6a44a79d72dfffdb14c05ec5d07590271b" gracePeriod=30 Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.554376 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-547cf5b9f-w4hcp"] Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.554733 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-547cf5b9f-w4hcp" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" containerName="barbican-worker-log" containerID="cri-o://ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482" gracePeriod=30 Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.554898 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-547cf5b9f-w4hcp" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" containerName="barbican-worker" containerID="cri-o://a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369" gracePeriod=30 Feb 02 17:09:19 crc kubenswrapper[4835]: I0202 17:09:19.836320 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" podUID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: connect: connection refused" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.463714 4835 generic.go:334] "Generic (PLEG): container finished" podID="18088c0c-a5e2-4721-a488-8970de4a6277" containerID="ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482" exitCode=143 Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.463795 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-547cf5b9f-w4hcp" event={"ID":"18088c0c-a5e2-4721-a488-8970de4a6277","Type":"ContainerDied","Data":"ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482"} Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.466941 4835 generic.go:334] "Generic (PLEG): container finished" podID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerID="db32698244b403f3159dd08c33790fdba9e849c48d251fb79c863370592f1c1d" exitCode=143 Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.467058 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" event={"ID":"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff","Type":"ContainerDied","Data":"db32698244b403f3159dd08c33790fdba9e849c48d251fb79c863370592f1c1d"} Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.732568 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-65fpg" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.829575 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-etc-machine-id\") pod \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.829760 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-scripts\") pod \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.829821 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-config-data\") pod \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.829869 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-combined-ca-bundle\") pod \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.829933 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-db-sync-config-data\") pod \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.829957 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r2mg\" (UniqueName: \"kubernetes.io/projected/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-kube-api-access-8r2mg\") pod \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\" (UID: \"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9\") " Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.831135 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" (UID: "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.839086 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-kube-api-access-8r2mg" (OuterVolumeSpecName: "kube-api-access-8r2mg") pod "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" (UID: "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9"). InnerVolumeSpecName "kube-api-access-8r2mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.841389 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" (UID: "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.842344 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-scripts" (OuterVolumeSpecName: "scripts") pod "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" (UID: "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.867152 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" (UID: "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.896193 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-config-data" (OuterVolumeSpecName: "config-data") pod "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" (UID: "7a6ab880-bf41-45c8-a66c-d096cf3d6eb9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.932234 4835 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.932299 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.932311 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.932321 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.932333 4835 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:20 crc kubenswrapper[4835]: I0202 17:09:20.932344 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r2mg\" (UniqueName: \"kubernetes.io/projected/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9-kube-api-access-8r2mg\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.009395 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.035073 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-sb\") pod \"eb0dd549-94f1-45b7-85c6-96039b500f39\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.035294 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-config\") pod \"eb0dd549-94f1-45b7-85c6-96039b500f39\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.035430 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-dns-svc\") pod \"eb0dd549-94f1-45b7-85c6-96039b500f39\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.035486 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-nb\") pod \"eb0dd549-94f1-45b7-85c6-96039b500f39\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.035565 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mm9h5\" (UniqueName: \"kubernetes.io/projected/eb0dd549-94f1-45b7-85c6-96039b500f39-kube-api-access-mm9h5\") pod \"eb0dd549-94f1-45b7-85c6-96039b500f39\" (UID: \"eb0dd549-94f1-45b7-85c6-96039b500f39\") " Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.040837 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb0dd549-94f1-45b7-85c6-96039b500f39-kube-api-access-mm9h5" (OuterVolumeSpecName: "kube-api-access-mm9h5") pod "eb0dd549-94f1-45b7-85c6-96039b500f39" (UID: "eb0dd549-94f1-45b7-85c6-96039b500f39"). InnerVolumeSpecName "kube-api-access-mm9h5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.117531 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "eb0dd549-94f1-45b7-85c6-96039b500f39" (UID: "eb0dd549-94f1-45b7-85c6-96039b500f39"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.124831 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "eb0dd549-94f1-45b7-85c6-96039b500f39" (UID: "eb0dd549-94f1-45b7-85c6-96039b500f39"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.130139 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-config" (OuterVolumeSpecName: "config") pod "eb0dd549-94f1-45b7-85c6-96039b500f39" (UID: "eb0dd549-94f1-45b7-85c6-96039b500f39"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.130829 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eb0dd549-94f1-45b7-85c6-96039b500f39" (UID: "eb0dd549-94f1-45b7-85c6-96039b500f39"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.137294 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.137469 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mm9h5\" (UniqueName: \"kubernetes.io/projected/eb0dd549-94f1-45b7-85c6-96039b500f39-kube-api-access-mm9h5\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.137543 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.137615 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.137676 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb0dd549-94f1-45b7-85c6-96039b500f39-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.476147 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.476493 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-82dzj" event={"ID":"eb0dd549-94f1-45b7-85c6-96039b500f39","Type":"ContainerDied","Data":"efb693de69095640b9bf8036589f06a52442f01da80e53494728777996530613"} Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.477524 4835 scope.go:117] "RemoveContainer" containerID="4bde80bce095b9e71fc44a95b8656cfa56704328e0611900c35b621b3dbcda02" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.480228 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerStarted","Data":"dd561385b10d5ee01132243c82995738b019b43ecd2d03d5ee7e6fd4d24160d6"} Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.480366 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.480386 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="ceilometer-central-agent" containerID="cri-o://7176eb1ad20a29f9434aadf7c9c8e4b49b3faffee72a94c9f3cc4d7bba09a7eb" gracePeriod=30 Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.480415 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="proxy-httpd" containerID="cri-o://dd561385b10d5ee01132243c82995738b019b43ecd2d03d5ee7e6fd4d24160d6" gracePeriod=30 Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.480423 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="ceilometer-notification-agent" containerID="cri-o://ca4f044e8a4403a11c7d3bf26b6da66c19176b2e0e401cdfb813d2c967882b00" gracePeriod=30 Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.480435 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="sg-core" containerID="cri-o://acc386e87e7c57d82b5630dca097c063071de98c2a0ba0d468e6f8a9b602c136" gracePeriod=30 Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.483357 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-65fpg" event={"ID":"7a6ab880-bf41-45c8-a66c-d096cf3d6eb9","Type":"ContainerDied","Data":"5aae7824c5cdae00328f7ad62eb45731d64cbb27120fe5be5601865c621d8c74"} Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.483562 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5aae7824c5cdae00328f7ad62eb45731d64cbb27120fe5be5601865c621d8c74" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.484064 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-65fpg" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.496965 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-788b5b9b58-9wmkc" event={"ID":"7466e48d-b9d4-4a34-917c-5ddd649eaac9","Type":"ContainerStarted","Data":"58a5a5a2a2682a6a7ffda70ccda8ae35ac80d2fa0b690601f236898b24be7704"} Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.497338 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-788b5b9b58-9wmkc" event={"ID":"7466e48d-b9d4-4a34-917c-5ddd649eaac9","Type":"ContainerStarted","Data":"9a9f070234393b87cacdea6d1e2f717d4d307921784bd70a3d1ecb8ffbb2cf6a"} Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.497757 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.497827 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.512039 4835 scope.go:117] "RemoveContainer" containerID="7f5aded104bdf926575d1260f4bd38781382455dcb2a364a79a67ed3a80800db" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.528977 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-788b5b9b58-9wmkc" podStartSLOduration=8.528954555 podStartE2EDuration="8.528954555s" podCreationTimestamp="2026-02-02 17:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:21.528853972 +0000 UTC m=+1153.150458072" watchObservedRunningTime="2026-02-02 17:09:21.528954555 +0000 UTC m=+1153.150558635" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.529475 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.965773777 podStartE2EDuration="48.529468019s" podCreationTimestamp="2026-02-02 17:08:33 +0000 UTC" firstStartedPulling="2026-02-02 17:08:35.170081206 +0000 UTC m=+1106.791685286" lastFinishedPulling="2026-02-02 17:09:20.733775448 +0000 UTC m=+1152.355379528" observedRunningTime="2026-02-02 17:09:21.506591803 +0000 UTC m=+1153.128195883" watchObservedRunningTime="2026-02-02 17:09:21.529468019 +0000 UTC m=+1153.151072099" Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.554209 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-82dzj"] Feb 02 17:09:21 crc kubenswrapper[4835]: I0202 17:09:21.563026 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-82dzj"] Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.052140 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:22 crc kubenswrapper[4835]: E0202 17:09:22.052855 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" containerName="cinder-db-sync" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.052874 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" containerName="cinder-db-sync" Feb 02 17:09:22 crc kubenswrapper[4835]: E0202 17:09:22.052886 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerName="dnsmasq-dns" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.052893 4835 
state_mem.go:107] "Deleted CPUSet assignment" podUID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerName="dnsmasq-dns" Feb 02 17:09:22 crc kubenswrapper[4835]: E0202 17:09:22.052922 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.052930 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api" Feb 02 17:09:22 crc kubenswrapper[4835]: E0202 17:09:22.052942 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api-log" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.052949 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api-log" Feb 02 17:09:22 crc kubenswrapper[4835]: E0202 17:09:22.052961 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerName="init" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.052968 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerName="init" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.053186 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb0dd549-94f1-45b7-85c6-96039b500f39" containerName="dnsmasq-dns" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.053208 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api-log" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.053216 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" containerName="cinder-db-sync" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.053229 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f10a70c-3a54-43cc-8848-cfe8e14cea67" containerName="barbican-api" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.054155 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.064576 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.064870 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-rglht" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.065113 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.065113 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.072926 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.116251 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-wgs86"] Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.121268 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.130646 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-wgs86"] Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.164923 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-scripts\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.166865 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8bdb\" (UniqueName: \"kubernetes.io/projected/05db14b9-403d-4650-8f5a-bb1d0c9be695-kube-api-access-k8bdb\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169059 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-dns-svc\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169123 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/05db14b9-403d-4650-8f5a-bb1d0c9be695-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169153 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169175 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169373 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169399 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169436 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-config\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169466 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.169573 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxsmw\" (UniqueName: \"kubernetes.io/projected/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-kube-api-access-xxsmw\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272108 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxsmw\" (UniqueName: \"kubernetes.io/projected/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-kube-api-access-xxsmw\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272195 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-scripts\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272221 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8bdb\" (UniqueName: \"kubernetes.io/projected/05db14b9-403d-4650-8f5a-bb1d0c9be695-kube-api-access-k8bdb\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272237 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-dns-svc\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272263 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/05db14b9-403d-4650-8f5a-bb1d0c9be695-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272290 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272305 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-sb\") pod 
\"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272374 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272393 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272416 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-config\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272435 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.272460 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/05db14b9-403d-4650-8f5a-bb1d0c9be695-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.273373 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-dns-svc\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.273397 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.277218 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.277928 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-config\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.291852 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-scripts\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.291874 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.295172 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8bdb\" (UniqueName: \"kubernetes.io/projected/05db14b9-403d-4650-8f5a-bb1d0c9be695-kube-api-access-k8bdb\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.295404 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.296830 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.299716 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.307967 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.309034 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.309723 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.312193 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxsmw\" (UniqueName: \"kubernetes.io/projected/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-kube-api-access-xxsmw\") pod \"dnsmasq-dns-58db5546cc-wgs86\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.374020 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.374234 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzgcq\" (UniqueName: \"kubernetes.io/projected/58f4e583-8073-47cc-be14-1dbb33cb58e4-kube-api-access-kzgcq\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc 
kubenswrapper[4835]: I0202 17:09:22.374357 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.374424 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-scripts\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.374510 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58f4e583-8073-47cc-be14-1dbb33cb58e4-logs\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.374594 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58f4e583-8073-47cc-be14-1dbb33cb58e4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.374702 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data-custom\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.383638 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.476651 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzgcq\" (UniqueName: \"kubernetes.io/projected/58f4e583-8073-47cc-be14-1dbb33cb58e4-kube-api-access-kzgcq\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.476744 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.476771 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-scripts\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.476836 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58f4e583-8073-47cc-be14-1dbb33cb58e4-logs\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.476900 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58f4e583-8073-47cc-be14-1dbb33cb58e4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.477001 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data-custom\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.477089 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.478867 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58f4e583-8073-47cc-be14-1dbb33cb58e4-logs\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.479511 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.479890 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58f4e583-8073-47cc-be14-1dbb33cb58e4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.482762 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.486037 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data-custom\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.486168 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.491498 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-scripts\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.508777 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzgcq\" (UniqueName: \"kubernetes.io/projected/58f4e583-8073-47cc-be14-1dbb33cb58e4-kube-api-access-kzgcq\") pod \"cinder-api-0\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.546760 4835 generic.go:334] "Generic (PLEG): container finished" podID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerID="dd561385b10d5ee01132243c82995738b019b43ecd2d03d5ee7e6fd4d24160d6" exitCode=0 Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.546818 4835 generic.go:334] "Generic (PLEG): container finished" podID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerID="acc386e87e7c57d82b5630dca097c063071de98c2a0ba0d468e6f8a9b602c136" exitCode=2 Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.546827 4835 generic.go:334] "Generic (PLEG): container finished" podID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerID="7176eb1ad20a29f9434aadf7c9c8e4b49b3faffee72a94c9f3cc4d7bba09a7eb" exitCode=0 Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.546870 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerDied","Data":"dd561385b10d5ee01132243c82995738b019b43ecd2d03d5ee7e6fd4d24160d6"} Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.546895 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerDied","Data":"acc386e87e7c57d82b5630dca097c063071de98c2a0ba0d468e6f8a9b602c136"} Feb 02 17:09:22 crc 
kubenswrapper[4835]: I0202 17:09:22.546904 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerDied","Data":"7176eb1ad20a29f9434aadf7c9c8e4b49b3faffee72a94c9f3cc4d7bba09a7eb"} Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.549363 4835 generic.go:334] "Generic (PLEG): container finished" podID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerID="5a1e38a06614af2e323519b8e1a56f6a44a79d72dfffdb14c05ec5d07590271b" exitCode=0 Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.549408 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" event={"ID":"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff","Type":"ContainerDied","Data":"5a1e38a06614af2e323519b8e1a56f6a44a79d72dfffdb14c05ec5d07590271b"} Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.707617 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 17:09:22 crc kubenswrapper[4835]: I0202 17:09:22.914937 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.028494 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.085158 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-wgs86"] Feb 02 17:09:23 crc kubenswrapper[4835]: W0202 17:09:23.106865 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8f2f577_d44e_4442_96b1_cc95f4f8d3d6.slice/crio-93721595a6287fced36c97190847cf30e0a088eeb04140632f9be211dae7b155 WatchSource:0}: Error finding container 93721595a6287fced36c97190847cf30e0a088eeb04140632f9be211dae7b155: Status 404 returned error can't find the container with id 93721595a6287fced36c97190847cf30e0a088eeb04140632f9be211dae7b155 Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.109984 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg4h9\" (UniqueName: \"kubernetes.io/projected/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-kube-api-access-rg4h9\") pod \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.110102 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data-custom\") pod \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.110150 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data\") pod \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.110181 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-combined-ca-bundle\") pod \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " Feb 02 17:09:23 crc 
kubenswrapper[4835]: I0202 17:09:23.110255 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-logs\") pod \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\" (UID: \"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.111333 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-logs" (OuterVolumeSpecName: "logs") pod "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" (UID: "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.118505 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-kube-api-access-rg4h9" (OuterVolumeSpecName: "kube-api-access-rg4h9") pod "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" (UID: "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff"). InnerVolumeSpecName "kube-api-access-rg4h9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.123474 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" (UID: "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.165934 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" (UID: "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.193442 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data" (OuterVolumeSpecName: "config-data") pod "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" (UID: "0e3c1b42-af9e-4c9d-9808-dcf1856a9bff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.210790 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb0dd549-94f1-45b7-85c6-96039b500f39" path="/var/lib/kubelet/pods/eb0dd549-94f1-45b7-85c6-96039b500f39/volumes" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.211955 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.211986 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.211999 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.212010 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.212021 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg4h9\" (UniqueName: \"kubernetes.io/projected/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff-kube-api-access-rg4h9\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.342376 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.415388 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7mcl\" (UniqueName: \"kubernetes.io/projected/18088c0c-a5e2-4721-a488-8970de4a6277-kube-api-access-c7mcl\") pod \"18088c0c-a5e2-4721-a488-8970de4a6277\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.416260 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18088c0c-a5e2-4721-a488-8970de4a6277-logs\") pod \"18088c0c-a5e2-4721-a488-8970de4a6277\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.416381 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data\") pod \"18088c0c-a5e2-4721-a488-8970de4a6277\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.416569 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data-custom\") pod \"18088c0c-a5e2-4721-a488-8970de4a6277\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.416622 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-combined-ca-bundle\") pod \"18088c0c-a5e2-4721-a488-8970de4a6277\" (UID: 
\"18088c0c-a5e2-4721-a488-8970de4a6277\") " Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.416672 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18088c0c-a5e2-4721-a488-8970de4a6277-logs" (OuterVolumeSpecName: "logs") pod "18088c0c-a5e2-4721-a488-8970de4a6277" (UID: "18088c0c-a5e2-4721-a488-8970de4a6277"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.417218 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18088c0c-a5e2-4721-a488-8970de4a6277-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.421155 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18088c0c-a5e2-4721-a488-8970de4a6277-kube-api-access-c7mcl" (OuterVolumeSpecName: "kube-api-access-c7mcl") pod "18088c0c-a5e2-4721-a488-8970de4a6277" (UID: "18088c0c-a5e2-4721-a488-8970de4a6277"). InnerVolumeSpecName "kube-api-access-c7mcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.431873 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "18088c0c-a5e2-4721-a488-8970de4a6277" (UID: "18088c0c-a5e2-4721-a488-8970de4a6277"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.449135 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:23 crc kubenswrapper[4835]: E0202 17:09:23.494631 4835 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data podName:18088c0c-a5e2-4721-a488-8970de4a6277 nodeName:}" failed. No retries permitted until 2026-02-02 17:09:23.994606324 +0000 UTC m=+1155.616210404 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data") pod "18088c0c-a5e2-4721-a488-8970de4a6277" (UID: "18088c0c-a5e2-4721-a488-8970de4a6277") : error deleting /var/lib/kubelet/pods/18088c0c-a5e2-4721-a488-8970de4a6277/volume-subpaths: remove /var/lib/kubelet/pods/18088c0c-a5e2-4721-a488-8970de4a6277/volume-subpaths: no such file or directory Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.498429 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18088c0c-a5e2-4721-a488-8970de4a6277" (UID: "18088c0c-a5e2-4721-a488-8970de4a6277"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.518538 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.518674 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.518737 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7mcl\" (UniqueName: \"kubernetes.io/projected/18088c0c-a5e2-4721-a488-8970de4a6277-kube-api-access-c7mcl\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.586700 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58f4e583-8073-47cc-be14-1dbb33cb58e4","Type":"ContainerStarted","Data":"6b7f64152f7bf98b630404f340aea67c384a258dbf38a14f36c54212644f2f95"} Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.593839 4835 generic.go:334] "Generic (PLEG): container finished" podID="18088c0c-a5e2-4721-a488-8970de4a6277" containerID="a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369" exitCode=0 Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.593898 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-547cf5b9f-w4hcp" event={"ID":"18088c0c-a5e2-4721-a488-8970de4a6277","Type":"ContainerDied","Data":"a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369"} Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.593923 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-547cf5b9f-w4hcp" event={"ID":"18088c0c-a5e2-4721-a488-8970de4a6277","Type":"ContainerDied","Data":"0ee8a01e8e6be7ba81872762857b8b46f95bcb3e7e23e80c7e1e7117f46255ed"} Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.593939 4835 scope.go:117] "RemoveContainer" containerID="a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.594052 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-547cf5b9f-w4hcp" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.604948 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" event={"ID":"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6","Type":"ContainerStarted","Data":"99192e86364a156adb02e21e88b11858517a5cef85e8314cff4c849a4a1ec495"} Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.604997 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" event={"ID":"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6","Type":"ContainerStarted","Data":"93721595a6287fced36c97190847cf30e0a088eeb04140632f9be211dae7b155"} Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.632522 4835 generic.go:334] "Generic (PLEG): container finished" podID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerID="ca4f044e8a4403a11c7d3bf26b6da66c19176b2e0e401cdfb813d2c967882b00" exitCode=0 Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.632581 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerDied","Data":"ca4f044e8a4403a11c7d3bf26b6da66c19176b2e0e401cdfb813d2c967882b00"} Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.644494 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"05db14b9-403d-4650-8f5a-bb1d0c9be695","Type":"ContainerStarted","Data":"b9a1b41d348b26bb4ff50bb4d2984d938052b166a80ae47386dfb77eb9696c7a"} Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.693957 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" event={"ID":"0e3c1b42-af9e-4c9d-9808-dcf1856a9bff","Type":"ContainerDied","Data":"57849e81f2534bb1f436f9a1124595aa125a1e4a1538de45987e10f2a4ad26ff"} Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.694139 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-697bf5f454-lm7b8" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.803657 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-697bf5f454-lm7b8"] Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.812119 4835 scope.go:117] "RemoveContainer" containerID="ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.820830 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-697bf5f454-lm7b8"] Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.931982 4835 scope.go:117] "RemoveContainer" containerID="a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369" Feb 02 17:09:23 crc kubenswrapper[4835]: E0202 17:09:23.933849 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369\": container with ID starting with a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369 not found: ID does not exist" containerID="a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.933996 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369"} err="failed to get container status \"a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369\": rpc error: code = NotFound desc = could not find container \"a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369\": container with ID starting with a3424dd3a2c54492013fa80bd94c72b3f33a6ce7ff6f858ec4ae348dd457e369 not found: ID does not exist" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.934035 4835 scope.go:117] "RemoveContainer" containerID="ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482" Feb 02 17:09:23 crc kubenswrapper[4835]: E0202 17:09:23.934442 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482\": container with ID starting with ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482 not found: ID does not exist" containerID="ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.934460 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482"} err="failed to get container status \"ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482\": rpc error: code = NotFound desc = could not find container \"ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482\": container with ID starting with ed0fc58a83e40c0e7eb3c9fc22c10567a0e3e02151b2763ecbaf7f11d92f6482 not found: ID does not exist" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.934473 4835 scope.go:117] "RemoveContainer" containerID="5a1e38a06614af2e323519b8e1a56f6a44a79d72dfffdb14c05ec5d07590271b" Feb 02 17:09:23 crc kubenswrapper[4835]: I0202 17:09:23.994365 4835 scope.go:117] "RemoveContainer" containerID="db32698244b403f3159dd08c33790fdba9e849c48d251fb79c863370592f1c1d" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.031058 4835 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data\") pod \"18088c0c-a5e2-4721-a488-8970de4a6277\" (UID: \"18088c0c-a5e2-4721-a488-8970de4a6277\") " Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.034895 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data" (OuterVolumeSpecName: "config-data") pod "18088c0c-a5e2-4721-a488-8970de4a6277" (UID: "18088c0c-a5e2-4721-a488-8970de4a6277"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.109837 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.132956 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.133632 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18088c0c-a5e2-4721-a488-8970de4a6277-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.225144 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-547cf5b9f-w4hcp"] Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.232468 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-547cf5b9f-w4hcp"] Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.234471 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-run-httpd\") pod \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.234610 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-sg-core-conf-yaml\") pod \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.234662 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/660aa765-d3f6-4673-a4df-7e4b46ab60ac-kube-api-access-prpvz\") pod \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.234699 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-combined-ca-bundle\") pod \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.234738 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-config-data\") pod \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.234754 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-scripts\") pod \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.234787 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-log-httpd\") pod \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\" (UID: \"660aa765-d3f6-4673-a4df-7e4b46ab60ac\") " Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.236068 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "660aa765-d3f6-4673-a4df-7e4b46ab60ac" (UID: "660aa765-d3f6-4673-a4df-7e4b46ab60ac"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.239395 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "660aa765-d3f6-4673-a4df-7e4b46ab60ac" (UID: "660aa765-d3f6-4673-a4df-7e4b46ab60ac"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.240034 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-scripts" (OuterVolumeSpecName: "scripts") pod "660aa765-d3f6-4673-a4df-7e4b46ab60ac" (UID: "660aa765-d3f6-4673-a4df-7e4b46ab60ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.243121 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/660aa765-d3f6-4673-a4df-7e4b46ab60ac-kube-api-access-prpvz" (OuterVolumeSpecName: "kube-api-access-prpvz") pod "660aa765-d3f6-4673-a4df-7e4b46ab60ac" (UID: "660aa765-d3f6-4673-a4df-7e4b46ab60ac"). InnerVolumeSpecName "kube-api-access-prpvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.268906 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "660aa765-d3f6-4673-a4df-7e4b46ab60ac" (UID: "660aa765-d3f6-4673-a4df-7e4b46ab60ac"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.333494 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "660aa765-d3f6-4673-a4df-7e4b46ab60ac" (UID: "660aa765-d3f6-4673-a4df-7e4b46ab60ac"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.337320 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.337343 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.337352 4835 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.337362 4835 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/660aa765-d3f6-4673-a4df-7e4b46ab60ac-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.337370 4835 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.337378 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/660aa765-d3f6-4673-a4df-7e4b46ab60ac-kube-api-access-prpvz\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.387564 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-config-data" (OuterVolumeSpecName: "config-data") pod "660aa765-d3f6-4673-a4df-7e4b46ab60ac" (UID: "660aa765-d3f6-4673-a4df-7e4b46ab60ac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.439566 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660aa765-d3f6-4673-a4df-7e4b46ab60ac-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.711490 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"05db14b9-403d-4650-8f5a-bb1d0c9be695","Type":"ContainerStarted","Data":"22e12ea32d74ea8bf72af328ee396e87f2d0e8e5d1a3dee6a7905471a0a69cf2"} Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.714531 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58f4e583-8073-47cc-be14-1dbb33cb58e4","Type":"ContainerStarted","Data":"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634"} Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.716656 4835 generic.go:334] "Generic (PLEG): container finished" podID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" containerID="99192e86364a156adb02e21e88b11858517a5cef85e8314cff4c849a4a1ec495" exitCode=0 Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.716709 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" event={"ID":"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6","Type":"ContainerDied","Data":"99192e86364a156adb02e21e88b11858517a5cef85e8314cff4c849a4a1ec495"} Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.716726 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" event={"ID":"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6","Type":"ContainerStarted","Data":"40722646456bae29e7e4f33bfdda31d100f0995b32dd23015176fb24a5c5b2ab"} Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.716933 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.726807 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"660aa765-d3f6-4673-a4df-7e4b46ab60ac","Type":"ContainerDied","Data":"e858ff3776cd796c5a895df5624518e5fa044eb9b5323019cf43d09979385130"} Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.726864 4835 scope.go:117] "RemoveContainer" containerID="dd561385b10d5ee01132243c82995738b019b43ecd2d03d5ee7e6fd4d24160d6" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.726891 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.741101 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" podStartSLOduration=2.741084981 podStartE2EDuration="2.741084981s" podCreationTimestamp="2026-02-02 17:09:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:24.739187728 +0000 UTC m=+1156.360791818" watchObservedRunningTime="2026-02-02 17:09:24.741084981 +0000 UTC m=+1156.362689061" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.789026 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.806048 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824030 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:24 crc kubenswrapper[4835]: E0202 17:09:24.824544 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="ceilometer-central-agent" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824565 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="ceilometer-central-agent" Feb 02 17:09:24 crc kubenswrapper[4835]: E0202 17:09:24.824580 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="ceilometer-notification-agent" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824588 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="ceilometer-notification-agent" Feb 02 17:09:24 crc kubenswrapper[4835]: E0202 17:09:24.824600 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="proxy-httpd" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824607 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="proxy-httpd" Feb 02 17:09:24 crc kubenswrapper[4835]: E0202 17:09:24.824650 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerName="barbican-keystone-listener-log" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824659 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerName="barbican-keystone-listener-log" Feb 02 17:09:24 crc kubenswrapper[4835]: E0202 17:09:24.824674 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="sg-core" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824680 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="sg-core" Feb 02 17:09:24 crc kubenswrapper[4835]: E0202 17:09:24.824694 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerName="barbican-keystone-listener" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824701 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerName="barbican-keystone-listener" Feb 02 17:09:24 crc 
kubenswrapper[4835]: E0202 17:09:24.824713 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" containerName="barbican-worker" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824739 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" containerName="barbican-worker" Feb 02 17:09:24 crc kubenswrapper[4835]: E0202 17:09:24.824750 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" containerName="barbican-worker-log" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824757 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" containerName="barbican-worker-log" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824935 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerName="barbican-keystone-listener-log" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824950 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="ceilometer-central-agent" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824964 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" containerName="barbican-worker-log" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824978 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="ceilometer-notification-agent" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.824989 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="proxy-httpd" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.825003 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" containerName="sg-core" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.825016 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" containerName="barbican-worker" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.825032 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" containerName="barbican-keystone-listener" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.828162 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.833936 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.834150 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.834860 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.871127 4835 scope.go:117] "RemoveContainer" containerID="acc386e87e7c57d82b5630dca097c063071de98c2a0ba0d468e6f8a9b602c136" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.897930 4835 scope.go:117] "RemoveContainer" containerID="ca4f044e8a4403a11c7d3bf26b6da66c19176b2e0e401cdfb813d2c967882b00" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.938054 4835 scope.go:117] "RemoveContainer" containerID="7176eb1ad20a29f9434aadf7c9c8e4b49b3faffee72a94c9f3cc4d7bba09a7eb" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.957696 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.957855 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-scripts\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.957990 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-run-httpd\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.958095 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22rmb\" (UniqueName: \"kubernetes.io/projected/19760545-1b23-461f-9e4b-d8b2d798fbcb-kube-api-access-22rmb\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.958377 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.958533 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-log-httpd\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:24 crc kubenswrapper[4835]: I0202 17:09:24.958604 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-config-data\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.060698 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.060783 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-log-httpd\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.060816 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-config-data\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.060915 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.060944 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-scripts\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.060973 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-run-httpd\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.061005 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22rmb\" (UniqueName: \"kubernetes.io/projected/19760545-1b23-461f-9e4b-d8b2d798fbcb-kube-api-access-22rmb\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.061361 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-log-httpd\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.061631 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-run-httpd\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.065072 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.065226 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-config-data\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.067042 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-scripts\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.069778 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.081135 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22rmb\" (UniqueName: \"kubernetes.io/projected/19760545-1b23-461f-9e4b-d8b2d798fbcb-kube-api-access-22rmb\") pod \"ceilometer-0\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.194435 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.200940 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e3c1b42-af9e-4c9d-9808-dcf1856a9bff" path="/var/lib/kubelet/pods/0e3c1b42-af9e-4c9d-9808-dcf1856a9bff/volumes" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.201955 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18088c0c-a5e2-4721-a488-8970de4a6277" path="/var/lib/kubelet/pods/18088c0c-a5e2-4721-a488-8970de4a6277/volumes" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.202807 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="660aa765-d3f6-4673-a4df-7e4b46ab60ac" path="/var/lib/kubelet/pods/660aa765-d3f6-4673-a4df-7e4b46ab60ac/volumes" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.705073 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.739183 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58f4e583-8073-47cc-be14-1dbb33cb58e4","Type":"ContainerStarted","Data":"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3"} Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.739304 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.739311 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerName="cinder-api-log" containerID="cri-o://0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634" gracePeriod=30 Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.739411 4835 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/cinder-api-0" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerName="cinder-api" containerID="cri-o://02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3" gracePeriod=30 Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.745442 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerStarted","Data":"a97273058c7f02b460228d0abf224ce8525bff50089887f88ecd25830508a089"} Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.749873 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"05db14b9-403d-4650-8f5a-bb1d0c9be695","Type":"ContainerStarted","Data":"7239f5326c9db6a9d06b21d2f54aeadf195425d961c4fcfddb47d183eca9fcaf"} Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.761216 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.761202874 podStartE2EDuration="3.761202874s" podCreationTimestamp="2026-02-02 17:09:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:25.75963263 +0000 UTC m=+1157.381236730" watchObservedRunningTime="2026-02-02 17:09:25.761202874 +0000 UTC m=+1157.382806944" Feb 02 17:09:25 crc kubenswrapper[4835]: I0202 17:09:25.805680 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.733606119 podStartE2EDuration="3.80565931s" podCreationTimestamp="2026-02-02 17:09:22 +0000 UTC" firstStartedPulling="2026-02-02 17:09:22.923180518 +0000 UTC m=+1154.544784608" lastFinishedPulling="2026-02-02 17:09:23.995233719 +0000 UTC m=+1155.616837799" observedRunningTime="2026-02-02 17:09:25.79256036 +0000 UTC m=+1157.414164460" watchObservedRunningTime="2026-02-02 17:09:25.80565931 +0000 UTC m=+1157.427263390" Feb 02 17:09:25 crc kubenswrapper[4835]: E0202 17:09:25.866555 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58f4e583_8073_47cc_be14_1dbb33cb58e4.slice/crio-0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58f4e583_8073_47cc_be14_1dbb33cb58e4.slice/crio-conmon-0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634.scope\": RecentStats: unable to find data in memory cache]" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.502433 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.603458 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-combined-ca-bundle\") pod \"58f4e583-8073-47cc-be14-1dbb33cb58e4\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.603575 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzgcq\" (UniqueName: \"kubernetes.io/projected/58f4e583-8073-47cc-be14-1dbb33cb58e4-kube-api-access-kzgcq\") pod \"58f4e583-8073-47cc-be14-1dbb33cb58e4\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.603634 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58f4e583-8073-47cc-be14-1dbb33cb58e4-logs\") pod \"58f4e583-8073-47cc-be14-1dbb33cb58e4\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.603704 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data\") pod \"58f4e583-8073-47cc-be14-1dbb33cb58e4\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.603728 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-scripts\") pod \"58f4e583-8073-47cc-be14-1dbb33cb58e4\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.603763 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58f4e583-8073-47cc-be14-1dbb33cb58e4-etc-machine-id\") pod \"58f4e583-8073-47cc-be14-1dbb33cb58e4\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.603785 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data-custom\") pod \"58f4e583-8073-47cc-be14-1dbb33cb58e4\" (UID: \"58f4e583-8073-47cc-be14-1dbb33cb58e4\") " Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.605009 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58f4e583-8073-47cc-be14-1dbb33cb58e4-logs" (OuterVolumeSpecName: "logs") pod "58f4e583-8073-47cc-be14-1dbb33cb58e4" (UID: "58f4e583-8073-47cc-be14-1dbb33cb58e4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.605129 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/58f4e583-8073-47cc-be14-1dbb33cb58e4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "58f4e583-8073-47cc-be14-1dbb33cb58e4" (UID: "58f4e583-8073-47cc-be14-1dbb33cb58e4"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.609235 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58f4e583-8073-47cc-be14-1dbb33cb58e4-kube-api-access-kzgcq" (OuterVolumeSpecName: "kube-api-access-kzgcq") pod "58f4e583-8073-47cc-be14-1dbb33cb58e4" (UID: "58f4e583-8073-47cc-be14-1dbb33cb58e4"). InnerVolumeSpecName "kube-api-access-kzgcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.609810 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "58f4e583-8073-47cc-be14-1dbb33cb58e4" (UID: "58f4e583-8073-47cc-be14-1dbb33cb58e4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.609687 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-scripts" (OuterVolumeSpecName: "scripts") pod "58f4e583-8073-47cc-be14-1dbb33cb58e4" (UID: "58f4e583-8073-47cc-be14-1dbb33cb58e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.643391 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58f4e583-8073-47cc-be14-1dbb33cb58e4" (UID: "58f4e583-8073-47cc-be14-1dbb33cb58e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.658725 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data" (OuterVolumeSpecName: "config-data") pod "58f4e583-8073-47cc-be14-1dbb33cb58e4" (UID: "58f4e583-8073-47cc-be14-1dbb33cb58e4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.707569 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58f4e583-8073-47cc-be14-1dbb33cb58e4-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.707876 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.707889 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.707901 4835 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58f4e583-8073-47cc-be14-1dbb33cb58e4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.707914 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.707928 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58f4e583-8073-47cc-be14-1dbb33cb58e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.707939 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzgcq\" (UniqueName: \"kubernetes.io/projected/58f4e583-8073-47cc-be14-1dbb33cb58e4-kube-api-access-kzgcq\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.767886 4835 generic.go:334] "Generic (PLEG): container finished" podID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerID="02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3" exitCode=0 Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.768891 4835 generic.go:334] "Generic (PLEG): container finished" podID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerID="0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634" exitCode=143 Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.769028 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58f4e583-8073-47cc-be14-1dbb33cb58e4","Type":"ContainerDied","Data":"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3"} Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.769105 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58f4e583-8073-47cc-be14-1dbb33cb58e4","Type":"ContainerDied","Data":"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634"} Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.769203 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58f4e583-8073-47cc-be14-1dbb33cb58e4","Type":"ContainerDied","Data":"6b7f64152f7bf98b630404f340aea67c384a258dbf38a14f36c54212644f2f95"} Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.769292 4835 scope.go:117] "RemoveContainer" containerID="02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.769566 
4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.778481 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerStarted","Data":"37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c"} Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.807851 4835 scope.go:117] "RemoveContainer" containerID="0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.838235 4835 scope.go:117] "RemoveContainer" containerID="02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.838841 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:26 crc kubenswrapper[4835]: E0202 17:09:26.840423 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3\": container with ID starting with 02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3 not found: ID does not exist" containerID="02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.840462 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3"} err="failed to get container status \"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3\": rpc error: code = NotFound desc = could not find container \"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3\": container with ID starting with 02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3 not found: ID does not exist" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.840491 4835 scope.go:117] "RemoveContainer" containerID="0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634" Feb 02 17:09:26 crc kubenswrapper[4835]: E0202 17:09:26.841958 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634\": container with ID starting with 0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634 not found: ID does not exist" containerID="0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.842002 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634"} err="failed to get container status \"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634\": rpc error: code = NotFound desc = could not find container \"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634\": container with ID starting with 0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634 not found: ID does not exist" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.842033 4835 scope.go:117] "RemoveContainer" containerID="02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.846244 4835 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3"} err="failed to get container status \"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3\": rpc error: code = NotFound desc = could not find container \"02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3\": container with ID starting with 02825d02cb00c0dc6829767c02c5c94b167c95ef1a2650db7f52d257fe6a66a3 not found: ID does not exist" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.846849 4835 scope.go:117] "RemoveContainer" containerID="0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.848636 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634"} err="failed to get container status \"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634\": rpc error: code = NotFound desc = could not find container \"0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634\": container with ID starting with 0ae66dd2a0b56d0a12048a87c2d4fdbe8dafd5f19bbc65df439cef4c77d5b634 not found: ID does not exist" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.854484 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.861166 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:26 crc kubenswrapper[4835]: E0202 17:09:26.863528 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerName="cinder-api" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.863550 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerName="cinder-api" Feb 02 17:09:26 crc kubenswrapper[4835]: E0202 17:09:26.863567 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerName="cinder-api-log" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.863573 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerName="cinder-api-log" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.863710 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerName="cinder-api-log" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.863732 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" containerName="cinder-api" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.864746 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.872202 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.872346 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.875841 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.887131 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.910645 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-config-data\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.910858 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.910951 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-scripts\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.911050 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpg5d\" (UniqueName: \"kubernetes.io/projected/99971416-88df-48dd-9e3a-91874214a8b6-kube-api-access-xpg5d\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.911119 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.911187 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99971416-88df-48dd-9e3a-91874214a8b6-logs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.911327 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/99971416-88df-48dd-9e3a-91874214a8b6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.912617 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:26 crc kubenswrapper[4835]: I0202 17:09:26.912667 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-config-data-custom\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.013980 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014050 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99971416-88df-48dd-9e3a-91874214a8b6-logs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014142 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/99971416-88df-48dd-9e3a-91874214a8b6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014183 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014213 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-config-data-custom\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014306 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-config-data\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014332 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014358 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-scripts\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014422 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xpg5d\" (UniqueName: \"kubernetes.io/projected/99971416-88df-48dd-9e3a-91874214a8b6-kube-api-access-xpg5d\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.014741 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/99971416-88df-48dd-9e3a-91874214a8b6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.015155 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99971416-88df-48dd-9e3a-91874214a8b6-logs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.019172 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-config-data-custom\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.019824 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.020435 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.020825 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-scripts\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.022816 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-config-data\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.030929 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/99971416-88df-48dd-9e3a-91874214a8b6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.032499 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpg5d\" (UniqueName: \"kubernetes.io/projected/99971416-88df-48dd-9e3a-91874214a8b6-kube-api-access-xpg5d\") pod \"cinder-api-0\" (UID: \"99971416-88df-48dd-9e3a-91874214a8b6\") " pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.186138 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.197769 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58f4e583-8073-47cc-be14-1dbb33cb58e4" path="/var/lib/kubelet/pods/58f4e583-8073-47cc-be14-1dbb33cb58e4/volumes" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.384214 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.658439 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 17:09:27 crc kubenswrapper[4835]: W0202 17:09:27.659168 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99971416_88df_48dd_9e3a_91874214a8b6.slice/crio-f18d4e42dbf4d2fc9e8f773606b1bf37f3cc188811c5eb1b236770e5d598d244 WatchSource:0}: Error finding container f18d4e42dbf4d2fc9e8f773606b1bf37f3cc188811c5eb1b236770e5d598d244: Status 404 returned error can't find the container with id f18d4e42dbf4d2fc9e8f773606b1bf37f3cc188811c5eb1b236770e5d598d244 Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.798424 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"99971416-88df-48dd-9e3a-91874214a8b6","Type":"ContainerStarted","Data":"f18d4e42dbf4d2fc9e8f773606b1bf37f3cc188811c5eb1b236770e5d598d244"} Feb 02 17:09:27 crc kubenswrapper[4835]: I0202 17:09:27.802567 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerStarted","Data":"9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db"} Feb 02 17:09:28 crc kubenswrapper[4835]: I0202 17:09:28.811507 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"99971416-88df-48dd-9e3a-91874214a8b6","Type":"ContainerStarted","Data":"c98e0022e29b05dc3f30174cea9a71dbd4288cab0443f425849d47505af6b6d0"} Feb 02 17:09:28 crc kubenswrapper[4835]: I0202 17:09:28.813461 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerStarted","Data":"e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254"} Feb 02 17:09:29 crc kubenswrapper[4835]: I0202 17:09:29.823914 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"99971416-88df-48dd-9e3a-91874214a8b6","Type":"ContainerStarted","Data":"a07b6b739d070dc98911da4c7a8755047a2b8132eb127f24f14c7e0d30357a5d"} Feb 02 17:09:29 crc kubenswrapper[4835]: I0202 17:09:29.824311 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 02 17:09:29 crc kubenswrapper[4835]: I0202 17:09:29.845129 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.845109812 podStartE2EDuration="3.845109812s" podCreationTimestamp="2026-02-02 17:09:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:29.844537386 +0000 UTC m=+1161.466141466" watchObservedRunningTime="2026-02-02 17:09:29.845109812 +0000 UTC m=+1161.466713902" Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.601471 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.607218 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-788b5b9b58-9wmkc" Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.685704 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-97d9cb6c4-nx6lx"] Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.686031 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-97d9cb6c4-nx6lx" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api-log" containerID="cri-o://e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173" gracePeriod=30 Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.686545 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-97d9cb6c4-nx6lx" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api" containerID="cri-o://7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9" gracePeriod=30 Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.776459 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.841871 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerStarted","Data":"09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51"} Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.843005 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.857783 4835 generic.go:334] "Generic (PLEG): container finished" podID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerID="e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173" exitCode=143 Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.857974 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-97d9cb6c4-nx6lx" event={"ID":"b4cf63eb-ca5d-4d27-8aff-b659a19a7938","Type":"ContainerDied","Data":"e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173"} Feb 02 17:09:30 crc kubenswrapper[4835]: I0202 17:09:30.882574 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.336624203 podStartE2EDuration="6.882555525s" podCreationTimestamp="2026-02-02 17:09:24 +0000 UTC" firstStartedPulling="2026-02-02 17:09:25.712772696 +0000 UTC m=+1157.334376776" lastFinishedPulling="2026-02-02 17:09:30.258704008 +0000 UTC m=+1161.880308098" observedRunningTime="2026-02-02 17:09:30.867325655 +0000 UTC m=+1162.488929735" watchObservedRunningTime="2026-02-02 17:09:30.882555525 +0000 UTC m=+1162.504159605" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.038118 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7874ff7b65-42jrg"] Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.038745 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7874ff7b65-42jrg" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-api" containerID="cri-o://5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5" gracePeriod=30 Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.039852 4835 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/neutron-7874ff7b65-42jrg" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-httpd" containerID="cri-o://e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4" gracePeriod=30 Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.080491 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.081402 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-66f46bdd4f-5p4b9"] Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.083010 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.097709 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66f46bdd4f-5p4b9"] Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.195249 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-httpd-config\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.195313 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-public-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.195338 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-config\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.195413 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfgvb\" (UniqueName: \"kubernetes.io/projected/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-kube-api-access-mfgvb\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.195430 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-ovndb-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.195451 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-combined-ca-bundle\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.195489 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-internal-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.297430 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-internal-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.297539 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-httpd-config\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.297559 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-public-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.297580 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-config\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.297658 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfgvb\" (UniqueName: \"kubernetes.io/projected/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-kube-api-access-mfgvb\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.297673 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-ovndb-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.297691 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-combined-ca-bundle\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.312250 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-ovndb-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.312376 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-httpd-config\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: 
\"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.312559 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-internal-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.312727 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-public-tls-certs\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.312953 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-config\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.313066 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-combined-ca-bundle\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.318875 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfgvb\" (UniqueName: \"kubernetes.io/projected/91d3abaa-c52b-495d-b400-8d7ad6ad28e9-kube-api-access-mfgvb\") pod \"neutron-66f46bdd4f-5p4b9\" (UID: \"91d3abaa-c52b-495d-b400-8d7ad6ad28e9\") " pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.420710 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.877565 4835 generic.go:334] "Generic (PLEG): container finished" podID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerID="e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4" exitCode=0 Feb 02 17:09:31 crc kubenswrapper[4835]: I0202 17:09:31.878144 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7874ff7b65-42jrg" event={"ID":"8b10dcfd-03a0-478a-87c1-f6d87260571a","Type":"ContainerDied","Data":"e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4"} Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.483400 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.554250 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66f46bdd4f-5p4b9"] Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.565570 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-mgjw6"] Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.566100 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" podUID="9768010f-5f61-48d8-883c-d6cf020cfdf1" containerName="dnsmasq-dns" containerID="cri-o://7b38d71a3ca0880539e9d959d382a16d05282e549eca3e253c2c7a557c06891e" gracePeriod=10 Feb 02 17:09:32 crc kubenswrapper[4835]: W0202 17:09:32.567673 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91d3abaa_c52b_495d_b400_8d7ad6ad28e9.slice/crio-0420fc9833dab5f940ca98f4d48b063e511e31228ebf537d140c2656e3cd496d WatchSource:0}: Error finding container 0420fc9833dab5f940ca98f4d48b063e511e31228ebf537d140c2656e3cd496d: Status 404 returned error can't find the container with id 0420fc9833dab5f940ca98f4d48b063e511e31228ebf537d140c2656e3cd496d Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.778121 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.835082 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.886836 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66f46bdd4f-5p4b9" event={"ID":"91d3abaa-c52b-495d-b400-8d7ad6ad28e9","Type":"ContainerStarted","Data":"0420fc9833dab5f940ca98f4d48b063e511e31228ebf537d140c2656e3cd496d"} Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.889669 4835 generic.go:334] "Generic (PLEG): container finished" podID="9768010f-5f61-48d8-883c-d6cf020cfdf1" containerID="7b38d71a3ca0880539e9d959d382a16d05282e549eca3e253c2c7a557c06891e" exitCode=0 Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.890734 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" event={"ID":"9768010f-5f61-48d8-883c-d6cf020cfdf1","Type":"ContainerDied","Data":"7b38d71a3ca0880539e9d959d382a16d05282e549eca3e253c2c7a557c06891e"} Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.890927 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerName="cinder-scheduler" 
containerID="cri-o://22e12ea32d74ea8bf72af328ee396e87f2d0e8e5d1a3dee6a7905471a0a69cf2" gracePeriod=30 Feb 02 17:09:32 crc kubenswrapper[4835]: I0202 17:09:32.891468 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerName="probe" containerID="cri-o://7239f5326c9db6a9d06b21d2f54aeadf195425d961c4fcfddb47d183eca9fcaf" gracePeriod=30 Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.141502 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-7874ff7b65-42jrg" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.143:9696/\": dial tcp 10.217.0.143:9696: connect: connection refused" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.184979 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.260194 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-dns-svc\") pod \"9768010f-5f61-48d8-883c-d6cf020cfdf1\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.260408 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-sb\") pod \"9768010f-5f61-48d8-883c-d6cf020cfdf1\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.260535 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-nb\") pod \"9768010f-5f61-48d8-883c-d6cf020cfdf1\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.260624 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw4ms\" (UniqueName: \"kubernetes.io/projected/9768010f-5f61-48d8-883c-d6cf020cfdf1-kube-api-access-fw4ms\") pod \"9768010f-5f61-48d8-883c-d6cf020cfdf1\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.260719 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-config\") pod \"9768010f-5f61-48d8-883c-d6cf020cfdf1\" (UID: \"9768010f-5f61-48d8-883c-d6cf020cfdf1\") " Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.296555 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9768010f-5f61-48d8-883c-d6cf020cfdf1-kube-api-access-fw4ms" (OuterVolumeSpecName: "kube-api-access-fw4ms") pod "9768010f-5f61-48d8-883c-d6cf020cfdf1" (UID: "9768010f-5f61-48d8-883c-d6cf020cfdf1"). InnerVolumeSpecName "kube-api-access-fw4ms". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.320045 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9768010f-5f61-48d8-883c-d6cf020cfdf1" (UID: "9768010f-5f61-48d8-883c-d6cf020cfdf1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.336843 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9768010f-5f61-48d8-883c-d6cf020cfdf1" (UID: "9768010f-5f61-48d8-883c-d6cf020cfdf1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.352455 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-config" (OuterVolumeSpecName: "config") pod "9768010f-5f61-48d8-883c-d6cf020cfdf1" (UID: "9768010f-5f61-48d8-883c-d6cf020cfdf1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.353821 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9768010f-5f61-48d8-883c-d6cf020cfdf1" (UID: "9768010f-5f61-48d8-883c-d6cf020cfdf1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.361605 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.361638 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.361648 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.361657 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw4ms\" (UniqueName: \"kubernetes.io/projected/9768010f-5f61-48d8-883c-d6cf020cfdf1-kube-api-access-fw4ms\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.361666 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9768010f-5f61-48d8-883c-d6cf020cfdf1-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.877004 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-97d9cb6c4-nx6lx" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.148:9311/healthcheck\": read tcp 10.217.0.2:59100->10.217.0.148:9311: read: connection reset by peer" Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 
17:09:33.877023 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-97d9cb6c4-nx6lx" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.148:9311/healthcheck\": read tcp 10.217.0.2:59102->10.217.0.148:9311: read: connection reset by peer"
Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.913175 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8b8949f68-7cjhk"
Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.941946 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-mgjw6"
Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.942097 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-mgjw6" event={"ID":"9768010f-5f61-48d8-883c-d6cf020cfdf1","Type":"ContainerDied","Data":"0b6563c7f2bc3d47239c9e8a55a9efda7b2a15491dc286cb634c6bbf8d67e2f8"}
Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.942150 4835 scope.go:117] "RemoveContainer" containerID="7b38d71a3ca0880539e9d959d382a16d05282e549eca3e253c2c7a557c06891e"
Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.950922 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66f46bdd4f-5p4b9" event={"ID":"91d3abaa-c52b-495d-b400-8d7ad6ad28e9","Type":"ContainerStarted","Data":"e21c7266227d931be777b937bca7af0b98657d496aee928cd897ad3de347a978"}
Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.950961 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66f46bdd4f-5p4b9" event={"ID":"91d3abaa-c52b-495d-b400-8d7ad6ad28e9","Type":"ContainerStarted","Data":"23bab9400f8b132f25ae7ac95710075fedb865dbcbb93d00af9d3656c1cd0e7a"}
Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.951183 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-66f46bdd4f-5p4b9"
Feb 02 17:09:33 crc kubenswrapper[4835]: I0202 17:09:33.996289 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-66f46bdd4f-5p4b9" podStartSLOduration=2.9962572610000002 podStartE2EDuration="2.996257261s" podCreationTimestamp="2026-02-02 17:09:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:33.971882282 +0000 UTC m=+1165.593486372" watchObservedRunningTime="2026-02-02 17:09:33.996257261 +0000 UTC m=+1165.617861341"
Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.023653 4835 scope.go:117] "RemoveContainer" containerID="e260eb78d58f6b9ffabef4d55a64e5a25831db6f1321808ab6d38ec936c752e4"
Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.024736 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-mgjw6"]
Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.037773 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-mgjw6"]
Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.363579 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8b8949f68-7cjhk"
Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.507552 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.586696 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-internal-tls-certs\") pod \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.586780 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-public-tls-certs\") pod \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.586833 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-combined-ca-bundle\") pod \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.586908 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnn5f\" (UniqueName: \"kubernetes.io/projected/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-kube-api-access-pnn5f\") pod \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.586972 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data\") pod \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.587046 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-logs\") pod \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.587087 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data-custom\") pod \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\" (UID: \"b4cf63eb-ca5d-4d27-8aff-b659a19a7938\") " Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.589834 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-logs" (OuterVolumeSpecName: "logs") pod "b4cf63eb-ca5d-4d27-8aff-b659a19a7938" (UID: "b4cf63eb-ca5d-4d27-8aff-b659a19a7938"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.611234 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-kube-api-access-pnn5f" (OuterVolumeSpecName: "kube-api-access-pnn5f") pod "b4cf63eb-ca5d-4d27-8aff-b659a19a7938" (UID: "b4cf63eb-ca5d-4d27-8aff-b659a19a7938"). InnerVolumeSpecName "kube-api-access-pnn5f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.619169 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b4cf63eb-ca5d-4d27-8aff-b659a19a7938" (UID: "b4cf63eb-ca5d-4d27-8aff-b659a19a7938"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.637012 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b8f544fd4-zp9bk"] Feb 02 17:09:34 crc kubenswrapper[4835]: E0202 17:09:34.637445 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api-log" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.637464 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api-log" Feb 02 17:09:34 crc kubenswrapper[4835]: E0202 17:09:34.637487 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9768010f-5f61-48d8-883c-d6cf020cfdf1" containerName="dnsmasq-dns" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.637493 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="9768010f-5f61-48d8-883c-d6cf020cfdf1" containerName="dnsmasq-dns" Feb 02 17:09:34 crc kubenswrapper[4835]: E0202 17:09:34.637501 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.637507 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api" Feb 02 17:09:34 crc kubenswrapper[4835]: E0202 17:09:34.637516 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9768010f-5f61-48d8-883c-d6cf020cfdf1" containerName="init" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.637521 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="9768010f-5f61-48d8-883c-d6cf020cfdf1" containerName="init" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.637668 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api-log" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.637682 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="9768010f-5f61-48d8-883c-d6cf020cfdf1" containerName="dnsmasq-dns" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.637692 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerName="barbican-api" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.638579 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.667440 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4cf63eb-ca5d-4d27-8aff-b659a19a7938" (UID: "b4cf63eb-ca5d-4d27-8aff-b659a19a7938"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.670797 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b8f544fd4-zp9bk"] Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688353 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnjkc\" (UniqueName: \"kubernetes.io/projected/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-kube-api-access-hnjkc\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688412 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-combined-ca-bundle\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688492 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-public-tls-certs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688526 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-config-data\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688570 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-internal-tls-certs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688622 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-scripts\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688697 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-logs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688746 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnn5f\" (UniqueName: \"kubernetes.io/projected/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-kube-api-access-pnn5f\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688756 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:34 
crc kubenswrapper[4835]: I0202 17:09:34.688766 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.688775 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.695744 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b4cf63eb-ca5d-4d27-8aff-b659a19a7938" (UID: "b4cf63eb-ca5d-4d27-8aff-b659a19a7938"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.703439 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data" (OuterVolumeSpecName: "config-data") pod "b4cf63eb-ca5d-4d27-8aff-b659a19a7938" (UID: "b4cf63eb-ca5d-4d27-8aff-b659a19a7938"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.731192 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b4cf63eb-ca5d-4d27-8aff-b659a19a7938" (UID: "b4cf63eb-ca5d-4d27-8aff-b659a19a7938"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790047 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnjkc\" (UniqueName: \"kubernetes.io/projected/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-kube-api-access-hnjkc\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790108 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-combined-ca-bundle\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790172 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-public-tls-certs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790221 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-config-data\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790264 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-internal-tls-certs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790346 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-scripts\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790389 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-logs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790455 4835 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790480 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.790492 4835 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cf63eb-ca5d-4d27-8aff-b659a19a7938-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:34 crc kubenswrapper[4835]: 
I0202 17:09:34.791000 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-logs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.794322 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-scripts\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.794870 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-internal-tls-certs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.794980 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-combined-ca-bundle\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.795068 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-public-tls-certs\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.795988 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-config-data\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.805809 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnjkc\" (UniqueName: \"kubernetes.io/projected/b37e6604-22e9-4e3d-8b9e-27ac0fccad12-kube-api-access-hnjkc\") pod \"placement-b8f544fd4-zp9bk\" (UID: \"b37e6604-22e9-4e3d-8b9e-27ac0fccad12\") " pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.962053 4835 generic.go:334] "Generic (PLEG): container finished" podID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerID="7239f5326c9db6a9d06b21d2f54aeadf195425d961c4fcfddb47d183eca9fcaf" exitCode=0 Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.962125 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"05db14b9-403d-4650-8f5a-bb1d0c9be695","Type":"ContainerDied","Data":"7239f5326c9db6a9d06b21d2f54aeadf195425d961c4fcfddb47d183eca9fcaf"} Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.964156 4835 generic.go:334] "Generic (PLEG): container finished" podID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" containerID="7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9" exitCode=0 Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.964214 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-97d9cb6c4-nx6lx" 
event={"ID":"b4cf63eb-ca5d-4d27-8aff-b659a19a7938","Type":"ContainerDied","Data":"7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9"} Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.964233 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-97d9cb6c4-nx6lx" event={"ID":"b4cf63eb-ca5d-4d27-8aff-b659a19a7938","Type":"ContainerDied","Data":"b8b61221be9f92cbcdd4971510eb97d4a53198f461b79ae1e52e12ffae215b1d"} Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.964217 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-97d9cb6c4-nx6lx" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.964250 4835 scope.go:117] "RemoveContainer" containerID="7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9" Feb 02 17:09:34 crc kubenswrapper[4835]: I0202 17:09:34.990733 4835 scope.go:117] "RemoveContainer" containerID="e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.016303 4835 scope.go:117] "RemoveContainer" containerID="7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9" Feb 02 17:09:35 crc kubenswrapper[4835]: E0202 17:09:35.017041 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9\": container with ID starting with 7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9 not found: ID does not exist" containerID="7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.017115 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9"} err="failed to get container status \"7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9\": rpc error: code = NotFound desc = could not find container \"7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9\": container with ID starting with 7aaa8225a262e6787f6ce93087938f7e6116c60382a87093aae00456b28536c9 not found: ID does not exist" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.017171 4835 scope.go:117] "RemoveContainer" containerID="e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173" Feb 02 17:09:35 crc kubenswrapper[4835]: E0202 17:09:35.017545 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173\": container with ID starting with e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173 not found: ID does not exist" containerID="e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.017585 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173"} err="failed to get container status \"e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173\": rpc error: code = NotFound desc = could not find container \"e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173\": container with ID starting with e30f114c672443103289b1be8c74a9e456b319de8d18a26b245ed15db55d1173 not found: ID does not exist" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.022226 4835 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-97d9cb6c4-nx6lx"] Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.032526 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-97d9cb6c4-nx6lx"] Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.082712 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.237025 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9768010f-5f61-48d8-883c-d6cf020cfdf1" path="/var/lib/kubelet/pods/9768010f-5f61-48d8-883c-d6cf020cfdf1/volumes" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.241965 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4cf63eb-ca5d-4d27-8aff-b659a19a7938" path="/var/lib/kubelet/pods/b4cf63eb-ca5d-4d27-8aff-b659a19a7938/volumes" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.641383 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b8f544fd4-zp9bk"] Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.898165 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.929182 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bk2c\" (UniqueName: \"kubernetes.io/projected/8b10dcfd-03a0-478a-87c1-f6d87260571a-kube-api-access-6bk2c\") pod \"8b10dcfd-03a0-478a-87c1-f6d87260571a\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.929286 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-public-tls-certs\") pod \"8b10dcfd-03a0-478a-87c1-f6d87260571a\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.929331 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-ovndb-tls-certs\") pod \"8b10dcfd-03a0-478a-87c1-f6d87260571a\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.929375 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-internal-tls-certs\") pod \"8b10dcfd-03a0-478a-87c1-f6d87260571a\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.929421 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-combined-ca-bundle\") pod \"8b10dcfd-03a0-478a-87c1-f6d87260571a\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.929447 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-config\") pod \"8b10dcfd-03a0-478a-87c1-f6d87260571a\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.929466 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-httpd-config\") pod \"8b10dcfd-03a0-478a-87c1-f6d87260571a\" (UID: \"8b10dcfd-03a0-478a-87c1-f6d87260571a\") " Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.934559 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "8b10dcfd-03a0-478a-87c1-f6d87260571a" (UID: "8b10dcfd-03a0-478a-87c1-f6d87260571a"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.947349 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b10dcfd-03a0-478a-87c1-f6d87260571a-kube-api-access-6bk2c" (OuterVolumeSpecName: "kube-api-access-6bk2c") pod "8b10dcfd-03a0-478a-87c1-f6d87260571a" (UID: "8b10dcfd-03a0-478a-87c1-f6d87260571a"). InnerVolumeSpecName "kube-api-access-6bk2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.982476 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b8f544fd4-zp9bk" event={"ID":"b37e6604-22e9-4e3d-8b9e-27ac0fccad12","Type":"ContainerStarted","Data":"6ec0a686970ad3d9a9cae32f6b9cf4126a70f5a1c6ed2889e18adc36a39c91c2"} Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.985460 4835 generic.go:334] "Generic (PLEG): container finished" podID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerID="5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5" exitCode=0 Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.985520 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7874ff7b65-42jrg" event={"ID":"8b10dcfd-03a0-478a-87c1-f6d87260571a","Type":"ContainerDied","Data":"5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5"} Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.985543 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7874ff7b65-42jrg" event={"ID":"8b10dcfd-03a0-478a-87c1-f6d87260571a","Type":"ContainerDied","Data":"b6136af9e8adaf5b17a0b52c9ea1c82b602fc3de08dc4a78cb529512c3292f52"} Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.985559 4835 scope.go:117] "RemoveContainer" containerID="e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.985683 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7874ff7b65-42jrg" Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.991663 4835 generic.go:334] "Generic (PLEG): container finished" podID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerID="22e12ea32d74ea8bf72af328ee396e87f2d0e8e5d1a3dee6a7905471a0a69cf2" exitCode=0 Feb 02 17:09:35 crc kubenswrapper[4835]: I0202 17:09:35.991737 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"05db14b9-403d-4650-8f5a-bb1d0c9be695","Type":"ContainerDied","Data":"22e12ea32d74ea8bf72af328ee396e87f2d0e8e5d1a3dee6a7905471a0a69cf2"} Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.009925 4835 scope.go:117] "RemoveContainer" containerID="5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.033611 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bk2c\" (UniqueName: \"kubernetes.io/projected/8b10dcfd-03a0-478a-87c1-f6d87260571a-kube-api-access-6bk2c\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.033638 4835 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.056106 4835 scope.go:117] "RemoveContainer" containerID="e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4" Feb 02 17:09:36 crc kubenswrapper[4835]: E0202 17:09:36.056639 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4\": container with ID starting with e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4 not found: ID does not exist" containerID="e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.056691 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4"} err="failed to get container status \"e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4\": rpc error: code = NotFound desc = could not find container \"e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4\": container with ID starting with e4a0158effc0af943816116bad433a8111a6c309fbd87938a8bd29d1bf30f4a4 not found: ID does not exist" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.056722 4835 scope.go:117] "RemoveContainer" containerID="5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5" Feb 02 17:09:36 crc kubenswrapper[4835]: E0202 17:09:36.057062 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5\": container with ID starting with 5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5 not found: ID does not exist" containerID="5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.057096 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5"} err="failed to get container status 
\"5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5\": rpc error: code = NotFound desc = could not find container \"5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5\": container with ID starting with 5c3c7299fb2f9b5649af6d0b6eb07205267d5bfbb175c96b78e6988f11dd29d5 not found: ID does not exist" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.105502 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8b10dcfd-03a0-478a-87c1-f6d87260571a" (UID: "8b10dcfd-03a0-478a-87c1-f6d87260571a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.108655 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b10dcfd-03a0-478a-87c1-f6d87260571a" (UID: "8b10dcfd-03a0-478a-87c1-f6d87260571a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.115502 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-config" (OuterVolumeSpecName: "config") pod "8b10dcfd-03a0-478a-87c1-f6d87260571a" (UID: "8b10dcfd-03a0-478a-87c1-f6d87260571a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.121468 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8b10dcfd-03a0-478a-87c1-f6d87260571a" (UID: "8b10dcfd-03a0-478a-87c1-f6d87260571a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.124783 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "8b10dcfd-03a0-478a-87c1-f6d87260571a" (UID: "8b10dcfd-03a0-478a-87c1-f6d87260571a"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.135116 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.135143 4835 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.135152 4835 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.135161 4835 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.135170 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b10dcfd-03a0-478a-87c1-f6d87260571a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.172002 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.236467 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data\") pod \"05db14b9-403d-4650-8f5a-bb1d0c9be695\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.236706 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-combined-ca-bundle\") pod \"05db14b9-403d-4650-8f5a-bb1d0c9be695\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.236743 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/05db14b9-403d-4650-8f5a-bb1d0c9be695-etc-machine-id\") pod \"05db14b9-403d-4650-8f5a-bb1d0c9be695\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.236879 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8bdb\" (UniqueName: \"kubernetes.io/projected/05db14b9-403d-4650-8f5a-bb1d0c9be695-kube-api-access-k8bdb\") pod \"05db14b9-403d-4650-8f5a-bb1d0c9be695\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.236949 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-scripts\") pod \"05db14b9-403d-4650-8f5a-bb1d0c9be695\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.237001 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data-custom\") pod \"05db14b9-403d-4650-8f5a-bb1d0c9be695\" (UID: \"05db14b9-403d-4650-8f5a-bb1d0c9be695\") " Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.237165 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/05db14b9-403d-4650-8f5a-bb1d0c9be695-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "05db14b9-403d-4650-8f5a-bb1d0c9be695" (UID: "05db14b9-403d-4650-8f5a-bb1d0c9be695"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.238792 4835 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/05db14b9-403d-4650-8f5a-bb1d0c9be695-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.243586 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05db14b9-403d-4650-8f5a-bb1d0c9be695-kube-api-access-k8bdb" (OuterVolumeSpecName: "kube-api-access-k8bdb") pod "05db14b9-403d-4650-8f5a-bb1d0c9be695" (UID: "05db14b9-403d-4650-8f5a-bb1d0c9be695"). InnerVolumeSpecName "kube-api-access-k8bdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.254349 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-scripts" (OuterVolumeSpecName: "scripts") pod "05db14b9-403d-4650-8f5a-bb1d0c9be695" (UID: "05db14b9-403d-4650-8f5a-bb1d0c9be695"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.255330 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "05db14b9-403d-4650-8f5a-bb1d0c9be695" (UID: "05db14b9-403d-4650-8f5a-bb1d0c9be695"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.308388 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05db14b9-403d-4650-8f5a-bb1d0c9be695" (UID: "05db14b9-403d-4650-8f5a-bb1d0c9be695"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.323147 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7874ff7b65-42jrg"] Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.335814 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7874ff7b65-42jrg"] Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.340199 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.340231 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.340239 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.340247 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8bdb\" (UniqueName: \"kubernetes.io/projected/05db14b9-403d-4650-8f5a-bb1d0c9be695-kube-api-access-k8bdb\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.361515 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data" (OuterVolumeSpecName: "config-data") pod "05db14b9-403d-4650-8f5a-bb1d0c9be695" (UID: "05db14b9-403d-4650-8f5a-bb1d0c9be695"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:36 crc kubenswrapper[4835]: I0202 17:09:36.442465 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05db14b9-403d-4650-8f5a-bb1d0c9be695-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.003356 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b8f544fd4-zp9bk" event={"ID":"b37e6604-22e9-4e3d-8b9e-27ac0fccad12","Type":"ContainerStarted","Data":"ed09b715089f4b5cd4576bf2874ea08c2a1ac1abc377cfd1d92fdab41276d1b8"} Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.003692 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b8f544fd4-zp9bk" event={"ID":"b37e6604-22e9-4e3d-8b9e-27ac0fccad12","Type":"ContainerStarted","Data":"c3bacf3e2298e0f14dd70e8972b8ec1864be6a5e5479f76a527b1150a89120c3"} Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.003882 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.003893 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.006969 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"05db14b9-403d-4650-8f5a-bb1d0c9be695","Type":"ContainerDied","Data":"b9a1b41d348b26bb4ff50bb4d2984d938052b166a80ae47386dfb77eb9696c7a"} Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.007008 4835 scope.go:117] "RemoveContainer" containerID="7239f5326c9db6a9d06b21d2f54aeadf195425d961c4fcfddb47d183eca9fcaf" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.007169 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.036987 4835 scope.go:117] "RemoveContainer" containerID="22e12ea32d74ea8bf72af328ee396e87f2d0e8e5d1a3dee6a7905471a0a69cf2" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.039935 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-b8f544fd4-zp9bk" podStartSLOduration=3.039913439 podStartE2EDuration="3.039913439s" podCreationTimestamp="2026-02-02 17:09:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:37.029682119 +0000 UTC m=+1168.651286199" watchObservedRunningTime="2026-02-02 17:09:37.039913439 +0000 UTC m=+1168.661517519" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.064448 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.073352 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086004 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:37 crc kubenswrapper[4835]: E0202 17:09:37.086419 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-httpd" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086437 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-httpd" Feb 02 17:09:37 crc kubenswrapper[4835]: E0202 17:09:37.086460 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerName="probe" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086468 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerName="probe" Feb 02 17:09:37 crc kubenswrapper[4835]: E0202 17:09:37.086499 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerName="cinder-scheduler" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086507 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerName="cinder-scheduler" Feb 02 17:09:37 crc kubenswrapper[4835]: E0202 17:09:37.086524 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-api" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086531 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-api" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086698 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerName="cinder-scheduler" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086718 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-httpd" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086738 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" containerName="neutron-api" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.086749 4835 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" containerName="probe" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.088496 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.096036 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.097254 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.154668 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-config-data\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.154746 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.154873 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-scripts\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.154922 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.154952 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.155018 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s28rn\" (UniqueName: \"kubernetes.io/projected/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-kube-api-access-s28rn\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.199675 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05db14b9-403d-4650-8f5a-bb1d0c9be695" path="/var/lib/kubelet/pods/05db14b9-403d-4650-8f5a-bb1d0c9be695/volumes" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.200495 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b10dcfd-03a0-478a-87c1-f6d87260571a" path="/var/lib/kubelet/pods/8b10dcfd-03a0-478a-87c1-f6d87260571a/volumes" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.255622 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-scripts\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.255662 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.255682 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.255722 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s28rn\" (UniqueName: \"kubernetes.io/projected/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-kube-api-access-s28rn\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.255810 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-config-data\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.255830 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.258659 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.259665 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.262943 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.263157 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-scripts\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 
17:09:37.263297 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-config-data\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.280917 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s28rn\" (UniqueName: \"kubernetes.io/projected/9c6fe27c-e17a-4f0f-bc50-21b8d1b49081-kube-api-access-s28rn\") pod \"cinder-scheduler-0\" (UID: \"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081\") " pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.423188 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 17:09:37 crc kubenswrapper[4835]: I0202 17:09:37.920048 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 17:09:38 crc kubenswrapper[4835]: I0202 17:09:38.022525 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081","Type":"ContainerStarted","Data":"fe039aec2fa1dc5e8acec041074b3642f4de1daaf029617f6b8117d2a258ce7b"} Feb 02 17:09:38 crc kubenswrapper[4835]: I0202 17:09:38.789295 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-577d94f4db-mdlkk" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.041338 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081","Type":"ContainerStarted","Data":"f574ea6d4d4b2cafa9a242a17758ccd7ef76c0ef8dda12f76100b0c1791b957f"} Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.050405 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.321248 4835 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podaf37c205-e3c2-43d3-a0df-9fb3e8629f87"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podaf37c205-e3c2-43d3-a0df-9fb3e8629f87] : Timed out while waiting for systemd to remove kubepods-besteffort-podaf37c205_e3c2_43d3_a0df_9fb3e8629f87.slice" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.526403 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.527859 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.530119 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.530135 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.530640 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-gbx5c" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.534055 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.603024 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config-secret\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.603083 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-combined-ca-bundle\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.603128 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.603158 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj8w9\" (UniqueName: \"kubernetes.io/projected/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-kube-api-access-xj8w9\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.704765 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config-secret\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.705095 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-combined-ca-bundle\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.705133 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.705152 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xj8w9\" (UniqueName: \"kubernetes.io/projected/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-kube-api-access-xj8w9\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.706084 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.709779 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-combined-ca-bundle\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.709883 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config-secret\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.725520 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj8w9\" (UniqueName: \"kubernetes.io/projected/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-kube-api-access-xj8w9\") pod \"openstackclient\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.817649 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.818477 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.834614 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.892720 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.894130 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.910634 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a73ab577-2970-4e91-bbde-344bd924ba2c-openstack-config\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.910863 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a73ab577-2970-4e91-bbde-344bd924ba2c-openstack-config-secret\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.911033 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a73ab577-2970-4e91-bbde-344bd924ba2c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.911131 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7jqh\" (UniqueName: \"kubernetes.io/projected/a73ab577-2970-4e91-bbde-344bd924ba2c-kube-api-access-b7jqh\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:39 crc kubenswrapper[4835]: I0202 17:09:39.912897 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 17:09:39 crc kubenswrapper[4835]: E0202 17:09:39.954871 4835 log.go:32] "RunPodSandbox from runtime service failed" err=< Feb 02 17:09:39 crc kubenswrapper[4835]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_aafe6be9-6093-43e0-9e8c-8a3bb125c2e5_0(71180b1eed5f7efb0cf0a3ef1801fb0d405c0bc46ba5f5b556ec83d0d3c9a61e): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"71180b1eed5f7efb0cf0a3ef1801fb0d405c0bc46ba5f5b556ec83d0d3c9a61e" Netns:"/var/run/netns/e461f458-0f12-4e8b-ae2b-45779755c8e4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=71180b1eed5f7efb0cf0a3ef1801fb0d405c0bc46ba5f5b556ec83d0d3c9a61e;K8S_POD_UID=aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5]: expected pod UID "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" but got "a73ab577-2970-4e91-bbde-344bd924ba2c" from Kube API Feb 02 17:09:39 crc kubenswrapper[4835]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 02 17:09:39 crc kubenswrapper[4835]: > Feb 02 17:09:39 crc kubenswrapper[4835]: E0202 17:09:39.954941 4835 kuberuntime_sandbox.go:72] 
"Failed to create sandbox for pod" err=< Feb 02 17:09:39 crc kubenswrapper[4835]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_aafe6be9-6093-43e0-9e8c-8a3bb125c2e5_0(71180b1eed5f7efb0cf0a3ef1801fb0d405c0bc46ba5f5b556ec83d0d3c9a61e): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"71180b1eed5f7efb0cf0a3ef1801fb0d405c0bc46ba5f5b556ec83d0d3c9a61e" Netns:"/var/run/netns/e461f458-0f12-4e8b-ae2b-45779755c8e4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=71180b1eed5f7efb0cf0a3ef1801fb0d405c0bc46ba5f5b556ec83d0d3c9a61e;K8S_POD_UID=aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5]: expected pod UID "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" but got "a73ab577-2970-4e91-bbde-344bd924ba2c" from Kube API Feb 02 17:09:39 crc kubenswrapper[4835]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 02 17:09:39 crc kubenswrapper[4835]: > pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.013421 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a73ab577-2970-4e91-bbde-344bd924ba2c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.013667 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7jqh\" (UniqueName: \"kubernetes.io/projected/a73ab577-2970-4e91-bbde-344bd924ba2c-kube-api-access-b7jqh\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.013818 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a73ab577-2970-4e91-bbde-344bd924ba2c-openstack-config\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.013875 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a73ab577-2970-4e91-bbde-344bd924ba2c-openstack-config-secret\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.015712 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a73ab577-2970-4e91-bbde-344bd924ba2c-openstack-config\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.019238 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a73ab577-2970-4e91-bbde-344bd924ba2c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.021810 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a73ab577-2970-4e91-bbde-344bd924ba2c-openstack-config-secret\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.033583 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7jqh\" (UniqueName: \"kubernetes.io/projected/a73ab577-2970-4e91-bbde-344bd924ba2c-kube-api-access-b7jqh\") pod \"openstackclient\" (UID: \"a73ab577-2970-4e91-bbde-344bd924ba2c\") " pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.073326 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.074818 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9c6fe27c-e17a-4f0f-bc50-21b8d1b49081","Type":"ContainerStarted","Data":"26d64e370eaef95d20890c77bb93ca10f3374c3fdd12bb5dfdda7005266877ed"} Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.115468 4835 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" podUID="a73ab577-2970-4e91-bbde-344bd924ba2c" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.123298 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.303160 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.320443 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-combined-ca-bundle\") pod \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.320595 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config\") pod \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.320660 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xj8w9\" (UniqueName: \"kubernetes.io/projected/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-kube-api-access-xj8w9\") pod \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.320688 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config-secret\") pod \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\" (UID: \"aafe6be9-6093-43e0-9e8c-8a3bb125c2e5\") " Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.321848 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" (UID: "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.328487 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" (UID: "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.334081 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-kube-api-access-xj8w9" (OuterVolumeSpecName: "kube-api-access-xj8w9") pod "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" (UID: "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5"). InnerVolumeSpecName "kube-api-access-xj8w9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.341869 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" (UID: "aafe6be9-6093-43e0-9e8c-8a3bb125c2e5"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.423091 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.423133 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xj8w9\" (UniqueName: \"kubernetes.io/projected/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-kube-api-access-xj8w9\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.423148 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.423160 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.753939 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.753924449 podStartE2EDuration="3.753924449s" podCreationTimestamp="2026-02-02 17:09:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:09:40.093761713 +0000 UTC m=+1171.715365793" watchObservedRunningTime="2026-02-02 17:09:40.753924449 +0000 UTC m=+1172.375528529" Feb 02 17:09:40 crc kubenswrapper[4835]: I0202 17:09:40.760494 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 17:09:41 crc kubenswrapper[4835]: I0202 17:09:41.089365 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a73ab577-2970-4e91-bbde-344bd924ba2c","Type":"ContainerStarted","Data":"9935a4645b40b850efd73b8d617039a0c9d9fe6f953b1874089eac1c785087e2"} Feb 02 17:09:41 crc kubenswrapper[4835]: I0202 17:09:41.089424 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 02 17:09:41 crc kubenswrapper[4835]: I0202 17:09:41.092966 4835 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" podUID="a73ab577-2970-4e91-bbde-344bd924ba2c" Feb 02 17:09:41 crc kubenswrapper[4835]: I0202 17:09:41.201826 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aafe6be9-6093-43e0-9e8c-8a3bb125c2e5" path="/var/lib/kubelet/pods/aafe6be9-6093-43e0-9e8c-8a3bb125c2e5/volumes" Feb 02 17:09:42 crc kubenswrapper[4835]: I0202 17:09:42.423902 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 02 17:09:46 crc kubenswrapper[4835]: I0202 17:09:46.056339 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:46 crc kubenswrapper[4835]: I0202 17:09:46.057240 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="ceilometer-central-agent" containerID="cri-o://37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c" gracePeriod=30 Feb 02 17:09:46 crc kubenswrapper[4835]: I0202 17:09:46.058073 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="ceilometer-notification-agent" containerID="cri-o://9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db" gracePeriod=30 Feb 02 17:09:46 crc kubenswrapper[4835]: I0202 17:09:46.058109 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="sg-core" containerID="cri-o://e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254" gracePeriod=30 Feb 02 17:09:46 crc kubenswrapper[4835]: I0202 17:09:46.058114 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="proxy-httpd" containerID="cri-o://09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51" gracePeriod=30 Feb 02 17:09:46 crc kubenswrapper[4835]: I0202 17:09:46.065625 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.156:3000/\": EOF" Feb 02 17:09:46 crc kubenswrapper[4835]: E0202 17:09:46.322810 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19760545_1b23_461f_9e4b_d8b2d798fbcb.slice/crio-conmon-09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19760545_1b23_461f_9e4b_d8b2d798fbcb.slice/crio-09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51.scope\": RecentStats: unable to find data in memory cache]" Feb 02 17:09:47 crc kubenswrapper[4835]: I0202 17:09:47.149808 4835 generic.go:334] "Generic (PLEG): container finished" podID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerID="09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51" exitCode=0 Feb 02 17:09:47 crc 
kubenswrapper[4835]: I0202 17:09:47.150147 4835 generic.go:334] "Generic (PLEG): container finished" podID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerID="e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254" exitCode=2 Feb 02 17:09:47 crc kubenswrapper[4835]: I0202 17:09:47.150161 4835 generic.go:334] "Generic (PLEG): container finished" podID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerID="37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c" exitCode=0 Feb 02 17:09:47 crc kubenswrapper[4835]: I0202 17:09:47.150005 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerDied","Data":"09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51"} Feb 02 17:09:47 crc kubenswrapper[4835]: I0202 17:09:47.150210 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerDied","Data":"e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254"} Feb 02 17:09:47 crc kubenswrapper[4835]: I0202 17:09:47.150225 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerDied","Data":"37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c"} Feb 02 17:09:47 crc kubenswrapper[4835]: I0202 17:09:47.652254 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.168403 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a73ab577-2970-4e91-bbde-344bd924ba2c","Type":"ContainerStarted","Data":"5f59aeeda364a910ea824652d65a68108310a16a18ce7a9871097c2ff72becde"} Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.184527 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.052328289 podStartE2EDuration="10.184507402s" podCreationTimestamp="2026-02-02 17:09:39 +0000 UTC" firstStartedPulling="2026-02-02 17:09:40.763543151 +0000 UTC m=+1172.385147231" lastFinishedPulling="2026-02-02 17:09:48.895722264 +0000 UTC m=+1180.517326344" observedRunningTime="2026-02-02 17:09:49.180091137 +0000 UTC m=+1180.801695217" watchObservedRunningTime="2026-02-02 17:09:49.184507402 +0000 UTC m=+1180.806111482" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.732994 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.847129 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-run-httpd\") pod \"19760545-1b23-461f-9e4b-d8b2d798fbcb\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.847199 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-log-httpd\") pod \"19760545-1b23-461f-9e4b-d8b2d798fbcb\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.847222 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-config-data\") pod \"19760545-1b23-461f-9e4b-d8b2d798fbcb\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.847329 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-scripts\") pod \"19760545-1b23-461f-9e4b-d8b2d798fbcb\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.847384 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-combined-ca-bundle\") pod \"19760545-1b23-461f-9e4b-d8b2d798fbcb\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.847412 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22rmb\" (UniqueName: \"kubernetes.io/projected/19760545-1b23-461f-9e4b-d8b2d798fbcb-kube-api-access-22rmb\") pod \"19760545-1b23-461f-9e4b-d8b2d798fbcb\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.847438 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-sg-core-conf-yaml\") pod \"19760545-1b23-461f-9e4b-d8b2d798fbcb\" (UID: \"19760545-1b23-461f-9e4b-d8b2d798fbcb\") " Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.848842 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "19760545-1b23-461f-9e4b-d8b2d798fbcb" (UID: "19760545-1b23-461f-9e4b-d8b2d798fbcb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.850285 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "19760545-1b23-461f-9e4b-d8b2d798fbcb" (UID: "19760545-1b23-461f-9e4b-d8b2d798fbcb"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.851907 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-scripts" (OuterVolumeSpecName: "scripts") pod "19760545-1b23-461f-9e4b-d8b2d798fbcb" (UID: "19760545-1b23-461f-9e4b-d8b2d798fbcb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.852364 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19760545-1b23-461f-9e4b-d8b2d798fbcb-kube-api-access-22rmb" (OuterVolumeSpecName: "kube-api-access-22rmb") pod "19760545-1b23-461f-9e4b-d8b2d798fbcb" (UID: "19760545-1b23-461f-9e4b-d8b2d798fbcb"). InnerVolumeSpecName "kube-api-access-22rmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.902903 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "19760545-1b23-461f-9e4b-d8b2d798fbcb" (UID: "19760545-1b23-461f-9e4b-d8b2d798fbcb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.949159 4835 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.949192 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.949202 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22rmb\" (UniqueName: \"kubernetes.io/projected/19760545-1b23-461f-9e4b-d8b2d798fbcb-kube-api-access-22rmb\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.949213 4835 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.949223 4835 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19760545-1b23-461f-9e4b-d8b2d798fbcb-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.987466 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19760545-1b23-461f-9e4b-d8b2d798fbcb" (UID: "19760545-1b23-461f-9e4b-d8b2d798fbcb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.994844 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-d4cr7"] Feb 02 17:09:49 crc kubenswrapper[4835]: E0202 17:09:49.995324 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="sg-core" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.995340 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="sg-core" Feb 02 17:09:49 crc kubenswrapper[4835]: E0202 17:09:49.995352 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="proxy-httpd" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.995359 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="proxy-httpd" Feb 02 17:09:49 crc kubenswrapper[4835]: E0202 17:09:49.995379 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="ceilometer-notification-agent" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.995385 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="ceilometer-notification-agent" Feb 02 17:09:49 crc kubenswrapper[4835]: E0202 17:09:49.995400 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="ceilometer-central-agent" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.995406 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="ceilometer-central-agent" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.995765 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="proxy-httpd" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.995784 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="sg-core" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.995803 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="ceilometer-central-agent" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.995811 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerName="ceilometer-notification-agent" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.996464 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:49 crc kubenswrapper[4835]: I0202 17:09:49.996461 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-config-data" (OuterVolumeSpecName: "config-data") pod "19760545-1b23-461f-9e4b-d8b2d798fbcb" (UID: "19760545-1b23-461f-9e4b-d8b2d798fbcb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.015331 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-d4cr7"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.052697 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.052732 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19760545-1b23-461f-9e4b-d8b2d798fbcb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.075547 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-bm5gm"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.077037 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.084632 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-bm5gm"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.154294 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a836f890-a488-4781-bafc-1e8a3b91f0a7-operator-scripts\") pod \"nova-api-db-create-d4cr7\" (UID: \"a836f890-a488-4781-bafc-1e8a3b91f0a7\") " pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.158536 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtzfj\" (UniqueName: \"kubernetes.io/projected/a836f890-a488-4781-bafc-1e8a3b91f0a7-kube-api-access-xtzfj\") pod \"nova-api-db-create-d4cr7\" (UID: \"a836f890-a488-4781-bafc-1e8a3b91f0a7\") " pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.179841 4835 generic.go:334] "Generic (PLEG): container finished" podID="19760545-1b23-461f-9e4b-d8b2d798fbcb" containerID="9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db" exitCode=0 Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.179948 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.179945 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerDied","Data":"9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db"} Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.180012 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19760545-1b23-461f-9e4b-d8b2d798fbcb","Type":"ContainerDied","Data":"a97273058c7f02b460228d0abf224ce8525bff50089887f88ecd25830508a089"} Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.180036 4835 scope.go:117] "RemoveContainer" containerID="09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.185368 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-a3b3-account-create-update-dq7zp"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.186416 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.190201 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.201616 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a3b3-account-create-update-dq7zp"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.211892 4835 scope.go:117] "RemoveContainer" containerID="e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.236168 4835 scope.go:117] "RemoveContainer" containerID="9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.236921 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.253205 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.260589 4835 scope.go:117] "RemoveContainer" containerID="37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.261414 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/118439ce-dfb4-462c-91f5-c989b2f82f1b-operator-scripts\") pod \"nova-cell0-db-create-bm5gm\" (UID: \"118439ce-dfb4-462c-91f5-c989b2f82f1b\") " pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.261605 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtzfj\" (UniqueName: \"kubernetes.io/projected/a836f890-a488-4781-bafc-1e8a3b91f0a7-kube-api-access-xtzfj\") pod \"nova-api-db-create-d4cr7\" (UID: \"a836f890-a488-4781-bafc-1e8a3b91f0a7\") " pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.261759 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a836f890-a488-4781-bafc-1e8a3b91f0a7-operator-scripts\") pod \"nova-api-db-create-d4cr7\" (UID: \"a836f890-a488-4781-bafc-1e8a3b91f0a7\") " pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.261938 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbp5s\" (UniqueName: \"kubernetes.io/projected/118439ce-dfb4-462c-91f5-c989b2f82f1b-kube-api-access-rbp5s\") pod \"nova-cell0-db-create-bm5gm\" (UID: \"118439ce-dfb4-462c-91f5-c989b2f82f1b\") " pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.263097 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a836f890-a488-4781-bafc-1e8a3b91f0a7-operator-scripts\") pod \"nova-api-db-create-d4cr7\" (UID: \"a836f890-a488-4781-bafc-1e8a3b91f0a7\") " pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.274999 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.277446 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.284619 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.284821 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.300122 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.301385 4835 scope.go:117] "RemoveContainer" containerID="09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.303826 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-gcjzv"] Feb 02 17:09:50 crc kubenswrapper[4835]: E0202 17:09:50.304793 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51\": container with ID starting with 09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51 not found: ID does not exist" containerID="09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.304833 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51"} err="failed to get container status \"09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51\": rpc error: code = NotFound desc = could not find container \"09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51\": container with ID starting with 09684b64a240fbaae619d4a02e640d3cfa2b0fc3a8f7e6be2ecc67d57c317e51 not found: ID does not exist" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.304860 4835 scope.go:117] "RemoveContainer" containerID="e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254" Feb 02 17:09:50 crc kubenswrapper[4835]: E0202 17:09:50.305140 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254\": container with ID starting with e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254 not found: ID does not exist" containerID="e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.305159 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254"} err="failed to get container status \"e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254\": rpc error: code = NotFound desc = could not find container \"e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254\": container with ID starting with e7686ef2e7aff5aa14d632941c3365f99737c9d8e8aaa619bedd976da59ec254 not found: ID does not exist" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.305171 4835 scope.go:117] "RemoveContainer" containerID="9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db" Feb 02 17:09:50 crc kubenswrapper[4835]: E0202 17:09:50.305485 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db\": container with ID starting with 9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db not found: ID does not exist" containerID="9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.305505 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.305503 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db"} err="failed to get container status \"9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db\": rpc error: code = NotFound desc = could not find container \"9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db\": container with ID starting with 9928dc32b8c77b37425639bb4a1833d3f8c1f3244292d4dec05cbe227b3a49db not found: ID does not exist" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.305604 4835 scope.go:117] "RemoveContainer" containerID="37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c" Feb 02 17:09:50 crc kubenswrapper[4835]: E0202 17:09:50.305792 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c\": container with ID starting with 37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c not found: ID does not exist" containerID="37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.305806 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c"} err="failed to get container status \"37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c\": rpc error: code = NotFound desc = could not find container \"37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c\": container with ID starting with 37af64ca6396732bf444baef87c099f517cd7379960f012dcde4016013f5e73c not found: ID does not exist" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.310736 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtzfj\" (UniqueName: \"kubernetes.io/projected/a836f890-a488-4781-bafc-1e8a3b91f0a7-kube-api-access-xtzfj\") pod \"nova-api-db-create-d4cr7\" (UID: \"a836f890-a488-4781-bafc-1e8a3b91f0a7\") " pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.350258 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-gcjzv"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.362958 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.364599 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/118439ce-dfb4-462c-91f5-c989b2f82f1b-operator-scripts\") pod \"nova-cell0-db-create-bm5gm\" (UID: \"118439ce-dfb4-462c-91f5-c989b2f82f1b\") " pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.385518 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhwr7\" (UniqueName: \"kubernetes.io/projected/b157515a-ef65-4c3c-9eb1-b015cf54a845-kube-api-access-jhwr7\") pod \"nova-api-a3b3-account-create-update-dq7zp\" (UID: \"b157515a-ef65-4c3c-9eb1-b015cf54a845\") " pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.385734 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbp5s\" (UniqueName: \"kubernetes.io/projected/118439ce-dfb4-462c-91f5-c989b2f82f1b-kube-api-access-rbp5s\") pod \"nova-cell0-db-create-bm5gm\" (UID: \"118439ce-dfb4-462c-91f5-c989b2f82f1b\") " pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.385827 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b157515a-ef65-4c3c-9eb1-b015cf54a845-operator-scripts\") pod \"nova-api-a3b3-account-create-update-dq7zp\" (UID: \"b157515a-ef65-4c3c-9eb1-b015cf54a845\") " pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.387234 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/118439ce-dfb4-462c-91f5-c989b2f82f1b-operator-scripts\") pod \"nova-cell0-db-create-bm5gm\" (UID: \"118439ce-dfb4-462c-91f5-c989b2f82f1b\") " pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.402551 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbp5s\" (UniqueName: \"kubernetes.io/projected/118439ce-dfb4-462c-91f5-c989b2f82f1b-kube-api-access-rbp5s\") pod \"nova-cell0-db-create-bm5gm\" (UID: \"118439ce-dfb4-462c-91f5-c989b2f82f1b\") " pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.408363 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-5861-account-create-update-8fgpf"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.409515 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.411720 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.415306 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5861-account-create-update-8fgpf"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.531666 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b157515a-ef65-4c3c-9eb1-b015cf54a845-operator-scripts\") pod \"nova-api-a3b3-account-create-update-dq7zp\" (UID: \"b157515a-ef65-4c3c-9eb1-b015cf54a845\") " pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.531747 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/732f4945-296c-4365-8854-d4633be82d41-operator-scripts\") pod \"nova-cell0-5861-account-create-update-8fgpf\" (UID: \"732f4945-296c-4365-8854-d4633be82d41\") " pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.531796 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svpzh\" (UniqueName: \"kubernetes.io/projected/233822fd-9e0a-4c0c-8591-0fce2284f28c-kube-api-access-svpzh\") pod \"nova-cell1-db-create-gcjzv\" (UID: \"233822fd-9e0a-4c0c-8591-0fce2284f28c\") " pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.531827 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-run-httpd\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.532724 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233822fd-9e0a-4c0c-8591-0fce2284f28c-operator-scripts\") pod \"nova-cell1-db-create-gcjzv\" (UID: \"233822fd-9e0a-4c0c-8591-0fce2284f28c\") " pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.532754 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.532801 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-scripts\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.532852 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-config-data\") pod \"ceilometer-0\" (UID: 
\"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.532900 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msm9g\" (UniqueName: \"kubernetes.io/projected/51198eb2-0d86-405e-b7f6-aa3079520932-kube-api-access-msm9g\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.532970 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z59qh\" (UniqueName: \"kubernetes.io/projected/732f4945-296c-4365-8854-d4633be82d41-kube-api-access-z59qh\") pod \"nova-cell0-5861-account-create-update-8fgpf\" (UID: \"732f4945-296c-4365-8854-d4633be82d41\") " pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.532993 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-log-httpd\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.533053 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.533082 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhwr7\" (UniqueName: \"kubernetes.io/projected/b157515a-ef65-4c3c-9eb1-b015cf54a845-kube-api-access-jhwr7\") pod \"nova-api-a3b3-account-create-update-dq7zp\" (UID: \"b157515a-ef65-4c3c-9eb1-b015cf54a845\") " pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.533418 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b157515a-ef65-4c3c-9eb1-b015cf54a845-operator-scripts\") pod \"nova-api-a3b3-account-create-update-dq7zp\" (UID: \"b157515a-ef65-4c3c-9eb1-b015cf54a845\") " pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.565794 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhwr7\" (UniqueName: \"kubernetes.io/projected/b157515a-ef65-4c3c-9eb1-b015cf54a845-kube-api-access-jhwr7\") pod \"nova-api-a3b3-account-create-update-dq7zp\" (UID: \"b157515a-ef65-4c3c-9eb1-b015cf54a845\") " pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.606345 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-d089-account-create-update-vrbcv"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.607892 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.610326 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.615917 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-d089-account-create-update-vrbcv"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634396 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/732f4945-296c-4365-8854-d4633be82d41-operator-scripts\") pod \"nova-cell0-5861-account-create-update-8fgpf\" (UID: \"732f4945-296c-4365-8854-d4633be82d41\") " pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634441 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svpzh\" (UniqueName: \"kubernetes.io/projected/233822fd-9e0a-4c0c-8591-0fce2284f28c-kube-api-access-svpzh\") pod \"nova-cell1-db-create-gcjzv\" (UID: \"233822fd-9e0a-4c0c-8591-0fce2284f28c\") " pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634465 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-run-httpd\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634492 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233822fd-9e0a-4c0c-8591-0fce2284f28c-operator-scripts\") pod \"nova-cell1-db-create-gcjzv\" (UID: \"233822fd-9e0a-4c0c-8591-0fce2284f28c\") " pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634511 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634527 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-scripts\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634555 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee5b5017-e8e0-40e3-b535-188b1443458e-operator-scripts\") pod \"nova-cell1-d089-account-create-update-vrbcv\" (UID: \"ee5b5017-e8e0-40e3-b535-188b1443458e\") " pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634589 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-config-data\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634611 4835 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msm9g\" (UniqueName: \"kubernetes.io/projected/51198eb2-0d86-405e-b7f6-aa3079520932-kube-api-access-msm9g\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634649 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z59qh\" (UniqueName: \"kubernetes.io/projected/732f4945-296c-4365-8854-d4633be82d41-kube-api-access-z59qh\") pod \"nova-cell0-5861-account-create-update-8fgpf\" (UID: \"732f4945-296c-4365-8854-d4633be82d41\") " pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634667 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-log-httpd\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634703 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.634724 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hchv9\" (UniqueName: \"kubernetes.io/projected/ee5b5017-e8e0-40e3-b535-188b1443458e-kube-api-access-hchv9\") pod \"nova-cell1-d089-account-create-update-vrbcv\" (UID: \"ee5b5017-e8e0-40e3-b535-188b1443458e\") " pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.635345 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/732f4945-296c-4365-8854-d4633be82d41-operator-scripts\") pod \"nova-cell0-5861-account-create-update-8fgpf\" (UID: \"732f4945-296c-4365-8854-d4633be82d41\") " pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.635519 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-run-httpd\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.635745 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233822fd-9e0a-4c0c-8591-0fce2284f28c-operator-scripts\") pod \"nova-cell1-db-create-gcjzv\" (UID: \"233822fd-9e0a-4c0c-8591-0fce2284f28c\") " pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.641957 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-log-httpd\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.651264 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-scripts\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.651731 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.653449 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.656687 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-config-data\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.657164 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z59qh\" (UniqueName: \"kubernetes.io/projected/732f4945-296c-4365-8854-d4633be82d41-kube-api-access-z59qh\") pod \"nova-cell0-5861-account-create-update-8fgpf\" (UID: \"732f4945-296c-4365-8854-d4633be82d41\") " pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.661749 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svpzh\" (UniqueName: \"kubernetes.io/projected/233822fd-9e0a-4c0c-8591-0fce2284f28c-kube-api-access-svpzh\") pod \"nova-cell1-db-create-gcjzv\" (UID: \"233822fd-9e0a-4c0c-8591-0fce2284f28c\") " pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.661995 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msm9g\" (UniqueName: \"kubernetes.io/projected/51198eb2-0d86-405e-b7f6-aa3079520932-kube-api-access-msm9g\") pod \"ceilometer-0\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.690330 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.727975 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.736460 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee5b5017-e8e0-40e3-b535-188b1443458e-operator-scripts\") pod \"nova-cell1-d089-account-create-update-vrbcv\" (UID: \"ee5b5017-e8e0-40e3-b535-188b1443458e\") " pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.736851 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hchv9\" (UniqueName: \"kubernetes.io/projected/ee5b5017-e8e0-40e3-b535-188b1443458e-kube-api-access-hchv9\") pod \"nova-cell1-d089-account-create-update-vrbcv\" (UID: \"ee5b5017-e8e0-40e3-b535-188b1443458e\") " pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.737643 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee5b5017-e8e0-40e3-b535-188b1443458e-operator-scripts\") pod \"nova-cell1-d089-account-create-update-vrbcv\" (UID: \"ee5b5017-e8e0-40e3-b535-188b1443458e\") " pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.753567 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hchv9\" (UniqueName: \"kubernetes.io/projected/ee5b5017-e8e0-40e3-b535-188b1443458e-kube-api-access-hchv9\") pod \"nova-cell1-d089-account-create-update-vrbcv\" (UID: \"ee5b5017-e8e0-40e3-b535-188b1443458e\") " pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.819564 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.898411 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-d4cr7"] Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.903712 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.928740 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:50 crc kubenswrapper[4835]: I0202 17:09:50.936488 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:51 crc kubenswrapper[4835]: I0202 17:09:51.172422 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-bm5gm"] Feb 02 17:09:51 crc kubenswrapper[4835]: W0202 17:09:51.205503 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod118439ce_dfb4_462c_91f5_c989b2f82f1b.slice/crio-23071f78035a27c827ff5a37df097538a1ca2c1515e2092e05d5a74689e3a9e8 WatchSource:0}: Error finding container 23071f78035a27c827ff5a37df097538a1ca2c1515e2092e05d5a74689e3a9e8: Status 404 returned error can't find the container with id 23071f78035a27c827ff5a37df097538a1ca2c1515e2092e05d5a74689e3a9e8 Feb 02 17:09:51 crc kubenswrapper[4835]: I0202 17:09:51.222693 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19760545-1b23-461f-9e4b-d8b2d798fbcb" path="/var/lib/kubelet/pods/19760545-1b23-461f-9e4b-d8b2d798fbcb/volumes" Feb 02 17:09:51 crc kubenswrapper[4835]: I0202 17:09:51.228006 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d4cr7" event={"ID":"a836f890-a488-4781-bafc-1e8a3b91f0a7","Type":"ContainerStarted","Data":"8ca5814f164e9f2918f97bc0ee027b68e6cd8e969506ee495be9914537950496"} Feb 02 17:09:51 crc kubenswrapper[4835]: I0202 17:09:51.305361 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5861-account-create-update-8fgpf"] Feb 02 17:09:51 crc kubenswrapper[4835]: W0202 17:09:51.466132 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb157515a_ef65_4c3c_9eb1_b015cf54a845.slice/crio-0e141a2fd00538e1fd47557a5af7463e0f1448f3abccb997310aa0c4dc130b62 WatchSource:0}: Error finding container 0e141a2fd00538e1fd47557a5af7463e0f1448f3abccb997310aa0c4dc130b62: Status 404 returned error can't find the container with id 0e141a2fd00538e1fd47557a5af7463e0f1448f3abccb997310aa0c4dc130b62 Feb 02 17:09:51 crc kubenswrapper[4835]: I0202 17:09:51.466533 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a3b3-account-create-update-dq7zp"] Feb 02 17:09:51 crc kubenswrapper[4835]: I0202 17:09:51.559694 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-d089-account-create-update-vrbcv"] Feb 02 17:09:51 crc kubenswrapper[4835]: I0202 17:09:51.573204 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:51 crc kubenswrapper[4835]: W0202 17:09:51.576663 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee5b5017_e8e0_40e3_b535_188b1443458e.slice/crio-e3d2674778fa1ca411a3bf1deb76e02c03b6901d177dc5d4d2eadf68b730010c WatchSource:0}: Error finding container e3d2674778fa1ca411a3bf1deb76e02c03b6901d177dc5d4d2eadf68b730010c: Status 404 returned error can't find the container with id e3d2674778fa1ca411a3bf1deb76e02c03b6901d177dc5d4d2eadf68b730010c Feb 02 17:09:51 crc kubenswrapper[4835]: W0202 17:09:51.587125 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod233822fd_9e0a_4c0c_8591_0fce2284f28c.slice/crio-22137db1d921c6e4a74b7b1108041835523c9702b6c887cbefc8126c50e5eb50 WatchSource:0}: Error finding container 22137db1d921c6e4a74b7b1108041835523c9702b6c887cbefc8126c50e5eb50: Status 404 returned error can't find the 
container with id 22137db1d921c6e4a74b7b1108041835523c9702b6c887cbefc8126c50e5eb50 Feb 02 17:09:51 crc kubenswrapper[4835]: I0202 17:09:51.588024 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-gcjzv"] Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.244873 4835 generic.go:334] "Generic (PLEG): container finished" podID="118439ce-dfb4-462c-91f5-c989b2f82f1b" containerID="d839df2d790a94415c72bcdd9b354b2987cbf0b81567df88bb8ba7c7a742120c" exitCode=0 Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.244980 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bm5gm" event={"ID":"118439ce-dfb4-462c-91f5-c989b2f82f1b","Type":"ContainerDied","Data":"d839df2d790a94415c72bcdd9b354b2987cbf0b81567df88bb8ba7c7a742120c"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.245226 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bm5gm" event={"ID":"118439ce-dfb4-462c-91f5-c989b2f82f1b","Type":"ContainerStarted","Data":"23071f78035a27c827ff5a37df097538a1ca2c1515e2092e05d5a74689e3a9e8"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.247892 4835 generic.go:334] "Generic (PLEG): container finished" podID="b157515a-ef65-4c3c-9eb1-b015cf54a845" containerID="30e57ba54d75acf1d00b0a4c3cc76a41faa518c5165fd00ee6ba3607e2146b54" exitCode=0 Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.247997 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a3b3-account-create-update-dq7zp" event={"ID":"b157515a-ef65-4c3c-9eb1-b015cf54a845","Type":"ContainerDied","Data":"30e57ba54d75acf1d00b0a4c3cc76a41faa518c5165fd00ee6ba3607e2146b54"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.248034 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a3b3-account-create-update-dq7zp" event={"ID":"b157515a-ef65-4c3c-9eb1-b015cf54a845","Type":"ContainerStarted","Data":"0e141a2fd00538e1fd47557a5af7463e0f1448f3abccb997310aa0c4dc130b62"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.253283 4835 generic.go:334] "Generic (PLEG): container finished" podID="a836f890-a488-4781-bafc-1e8a3b91f0a7" containerID="6f383e83e9c08d5517a64d1f621ca5dbd8c9f4345cc44aa913cc65375192b2f2" exitCode=0 Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.253374 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d4cr7" event={"ID":"a836f890-a488-4781-bafc-1e8a3b91f0a7","Type":"ContainerDied","Data":"6f383e83e9c08d5517a64d1f621ca5dbd8c9f4345cc44aa913cc65375192b2f2"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.255425 4835 generic.go:334] "Generic (PLEG): container finished" podID="732f4945-296c-4365-8854-d4633be82d41" containerID="a4143ddc1f2c674d003865b6b2d5c585b01901dd5af9065a28d023d5f477bf67" exitCode=0 Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.255563 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5861-account-create-update-8fgpf" event={"ID":"732f4945-296c-4365-8854-d4633be82d41","Type":"ContainerDied","Data":"a4143ddc1f2c674d003865b6b2d5c585b01901dd5af9065a28d023d5f477bf67"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.255608 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5861-account-create-update-8fgpf" event={"ID":"732f4945-296c-4365-8854-d4633be82d41","Type":"ContainerStarted","Data":"440687be371cb9769aabb2391d2f540232251aae0daef55714f1dd17208145cd"} Feb 02 17:09:52 crc kubenswrapper[4835]: 
I0202 17:09:52.257683 4835 generic.go:334] "Generic (PLEG): container finished" podID="ee5b5017-e8e0-40e3-b535-188b1443458e" containerID="3bcc0f49b56c828ba04d5a7e46d43b4dc4f7a06648dc289ed0c0d97d3abbc0c5" exitCode=0 Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.257780 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d089-account-create-update-vrbcv" event={"ID":"ee5b5017-e8e0-40e3-b535-188b1443458e","Type":"ContainerDied","Data":"3bcc0f49b56c828ba04d5a7e46d43b4dc4f7a06648dc289ed0c0d97d3abbc0c5"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.257804 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d089-account-create-update-vrbcv" event={"ID":"ee5b5017-e8e0-40e3-b535-188b1443458e","Type":"ContainerStarted","Data":"e3d2674778fa1ca411a3bf1deb76e02c03b6901d177dc5d4d2eadf68b730010c"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.259770 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerStarted","Data":"876c745019c9211219c26b4135132242987741ae934904c79a0c92d02c42e8ad"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.261758 4835 generic.go:334] "Generic (PLEG): container finished" podID="233822fd-9e0a-4c0c-8591-0fce2284f28c" containerID="c8b67bd71c4cf201081f3260954dd1d767d34ca3dc421682692a7545df7af09c" exitCode=0 Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.261790 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gcjzv" event={"ID":"233822fd-9e0a-4c0c-8591-0fce2284f28c","Type":"ContainerDied","Data":"c8b67bd71c4cf201081f3260954dd1d767d34ca3dc421682692a7545df7af09c"} Feb 02 17:09:52 crc kubenswrapper[4835]: I0202 17:09:52.261826 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gcjzv" event={"ID":"233822fd-9e0a-4c0c-8591-0fce2284f28c","Type":"ContainerStarted","Data":"22137db1d921c6e4a74b7b1108041835523c9702b6c887cbefc8126c50e5eb50"} Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.296441 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerStarted","Data":"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4"} Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.296764 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerStarted","Data":"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3"} Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.713895 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.820885 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtzfj\" (UniqueName: \"kubernetes.io/projected/a836f890-a488-4781-bafc-1e8a3b91f0a7-kube-api-access-xtzfj\") pod \"a836f890-a488-4781-bafc-1e8a3b91f0a7\" (UID: \"a836f890-a488-4781-bafc-1e8a3b91f0a7\") " Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.821419 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a836f890-a488-4781-bafc-1e8a3b91f0a7-operator-scripts\") pod \"a836f890-a488-4781-bafc-1e8a3b91f0a7\" (UID: \"a836f890-a488-4781-bafc-1e8a3b91f0a7\") " Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.822104 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a836f890-a488-4781-bafc-1e8a3b91f0a7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a836f890-a488-4781-bafc-1e8a3b91f0a7" (UID: "a836f890-a488-4781-bafc-1e8a3b91f0a7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.826589 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a836f890-a488-4781-bafc-1e8a3b91f0a7-kube-api-access-xtzfj" (OuterVolumeSpecName: "kube-api-access-xtzfj") pod "a836f890-a488-4781-bafc-1e8a3b91f0a7" (UID: "a836f890-a488-4781-bafc-1e8a3b91f0a7"). InnerVolumeSpecName "kube-api-access-xtzfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.925068 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtzfj\" (UniqueName: \"kubernetes.io/projected/a836f890-a488-4781-bafc-1e8a3b91f0a7-kube-api-access-xtzfj\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:53 crc kubenswrapper[4835]: I0202 17:09:53.925114 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a836f890-a488-4781-bafc-1e8a3b91f0a7-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.138360 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.146082 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.165403 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.168300 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.171904 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.330336 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d089-account-create-update-vrbcv" event={"ID":"ee5b5017-e8e0-40e3-b535-188b1443458e","Type":"ContainerDied","Data":"e3d2674778fa1ca411a3bf1deb76e02c03b6901d177dc5d4d2eadf68b730010c"} Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.330382 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3d2674778fa1ca411a3bf1deb76e02c03b6901d177dc5d4d2eadf68b730010c" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.330478 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-d089-account-create-update-vrbcv" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.334489 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.340745 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hchv9\" (UniqueName: \"kubernetes.io/projected/ee5b5017-e8e0-40e3-b535-188b1443458e-kube-api-access-hchv9\") pod \"ee5b5017-e8e0-40e3-b535-188b1443458e\" (UID: \"ee5b5017-e8e0-40e3-b535-188b1443458e\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.340821 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/732f4945-296c-4365-8854-d4633be82d41-operator-scripts\") pod \"732f4945-296c-4365-8854-d4633be82d41\" (UID: \"732f4945-296c-4365-8854-d4633be82d41\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.340930 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svpzh\" (UniqueName: \"kubernetes.io/projected/233822fd-9e0a-4c0c-8591-0fce2284f28c-kube-api-access-svpzh\") pod \"233822fd-9e0a-4c0c-8591-0fce2284f28c\" (UID: \"233822fd-9e0a-4c0c-8591-0fce2284f28c\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.340956 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b157515a-ef65-4c3c-9eb1-b015cf54a845-operator-scripts\") pod \"b157515a-ef65-4c3c-9eb1-b015cf54a845\" (UID: \"b157515a-ef65-4c3c-9eb1-b015cf54a845\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.341002 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/118439ce-dfb4-462c-91f5-c989b2f82f1b-operator-scripts\") pod \"118439ce-dfb4-462c-91f5-c989b2f82f1b\" (UID: \"118439ce-dfb4-462c-91f5-c989b2f82f1b\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.341063 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z59qh\" (UniqueName: \"kubernetes.io/projected/732f4945-296c-4365-8854-d4633be82d41-kube-api-access-z59qh\") pod \"732f4945-296c-4365-8854-d4633be82d41\" (UID: \"732f4945-296c-4365-8854-d4633be82d41\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.341098 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbp5s\" (UniqueName: \"kubernetes.io/projected/118439ce-dfb4-462c-91f5-c989b2f82f1b-kube-api-access-rbp5s\") pod \"118439ce-dfb4-462c-91f5-c989b2f82f1b\" (UID: \"118439ce-dfb4-462c-91f5-c989b2f82f1b\") " Feb 02 17:09:54 crc 
kubenswrapper[4835]: I0202 17:09:54.341124 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233822fd-9e0a-4c0c-8591-0fce2284f28c-operator-scripts\") pod \"233822fd-9e0a-4c0c-8591-0fce2284f28c\" (UID: \"233822fd-9e0a-4c0c-8591-0fce2284f28c\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.341145 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee5b5017-e8e0-40e3-b535-188b1443458e-operator-scripts\") pod \"ee5b5017-e8e0-40e3-b535-188b1443458e\" (UID: \"ee5b5017-e8e0-40e3-b535-188b1443458e\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.341260 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhwr7\" (UniqueName: \"kubernetes.io/projected/b157515a-ef65-4c3c-9eb1-b015cf54a845-kube-api-access-jhwr7\") pod \"b157515a-ef65-4c3c-9eb1-b015cf54a845\" (UID: \"b157515a-ef65-4c3c-9eb1-b015cf54a845\") " Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.341703 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/118439ce-dfb4-462c-91f5-c989b2f82f1b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "118439ce-dfb4-462c-91f5-c989b2f82f1b" (UID: "118439ce-dfb4-462c-91f5-c989b2f82f1b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.341752 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/732f4945-296c-4365-8854-d4633be82d41-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "732f4945-296c-4365-8854-d4633be82d41" (UID: "732f4945-296c-4365-8854-d4633be82d41"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.341859 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/233822fd-9e0a-4c0c-8591-0fce2284f28c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "233822fd-9e0a-4c0c-8591-0fce2284f28c" (UID: "233822fd-9e0a-4c0c-8591-0fce2284f28c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.342038 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee5b5017-e8e0-40e3-b535-188b1443458e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ee5b5017-e8e0-40e3-b535-188b1443458e" (UID: "ee5b5017-e8e0-40e3-b535-188b1443458e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.342143 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b157515a-ef65-4c3c-9eb1-b015cf54a845-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b157515a-ef65-4c3c-9eb1-b015cf54a845" (UID: "b157515a-ef65-4c3c-9eb1-b015cf54a845"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.342679 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerStarted","Data":"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094"} Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.346391 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/732f4945-296c-4365-8854-d4633be82d41-kube-api-access-z59qh" (OuterVolumeSpecName: "kube-api-access-z59qh") pod "732f4945-296c-4365-8854-d4633be82d41" (UID: "732f4945-296c-4365-8854-d4633be82d41"). InnerVolumeSpecName "kube-api-access-z59qh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.348999 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/118439ce-dfb4-462c-91f5-c989b2f82f1b-kube-api-access-rbp5s" (OuterVolumeSpecName: "kube-api-access-rbp5s") pod "118439ce-dfb4-462c-91f5-c989b2f82f1b" (UID: "118439ce-dfb4-462c-91f5-c989b2f82f1b"). InnerVolumeSpecName "kube-api-access-rbp5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.349513 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee5b5017-e8e0-40e3-b535-188b1443458e-kube-api-access-hchv9" (OuterVolumeSpecName: "kube-api-access-hchv9") pod "ee5b5017-e8e0-40e3-b535-188b1443458e" (UID: "ee5b5017-e8e0-40e3-b535-188b1443458e"). InnerVolumeSpecName "kube-api-access-hchv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.349751 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/233822fd-9e0a-4c0c-8591-0fce2284f28c-kube-api-access-svpzh" (OuterVolumeSpecName: "kube-api-access-svpzh") pod "233822fd-9e0a-4c0c-8591-0fce2284f28c" (UID: "233822fd-9e0a-4c0c-8591-0fce2284f28c"). InnerVolumeSpecName "kube-api-access-svpzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.350110 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gcjzv" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.350120 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gcjzv" event={"ID":"233822fd-9e0a-4c0c-8591-0fce2284f28c","Type":"ContainerDied","Data":"22137db1d921c6e4a74b7b1108041835523c9702b6c887cbefc8126c50e5eb50"} Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.350311 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22137db1d921c6e4a74b7b1108041835523c9702b6c887cbefc8126c50e5eb50" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.353772 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b157515a-ef65-4c3c-9eb1-b015cf54a845-kube-api-access-jhwr7" (OuterVolumeSpecName: "kube-api-access-jhwr7") pod "b157515a-ef65-4c3c-9eb1-b015cf54a845" (UID: "b157515a-ef65-4c3c-9eb1-b015cf54a845"). InnerVolumeSpecName "kube-api-access-jhwr7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.356511 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bm5gm" event={"ID":"118439ce-dfb4-462c-91f5-c989b2f82f1b","Type":"ContainerDied","Data":"23071f78035a27c827ff5a37df097538a1ca2c1515e2092e05d5a74689e3a9e8"} Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.356660 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23071f78035a27c827ff5a37df097538a1ca2c1515e2092e05d5a74689e3a9e8" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.356837 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bm5gm" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.366805 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a3b3-account-create-update-dq7zp" event={"ID":"b157515a-ef65-4c3c-9eb1-b015cf54a845","Type":"ContainerDied","Data":"0e141a2fd00538e1fd47557a5af7463e0f1448f3abccb997310aa0c4dc130b62"} Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.367078 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a3b3-account-create-update-dq7zp" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.367100 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e141a2fd00538e1fd47557a5af7463e0f1448f3abccb997310aa0c4dc130b62" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.377794 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-d4cr7" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.377881 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d4cr7" event={"ID":"a836f890-a488-4781-bafc-1e8a3b91f0a7","Type":"ContainerDied","Data":"8ca5814f164e9f2918f97bc0ee027b68e6cd8e969506ee495be9914537950496"} Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.377915 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ca5814f164e9f2918f97bc0ee027b68e6cd8e969506ee495be9914537950496" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.381558 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5861-account-create-update-8fgpf" event={"ID":"732f4945-296c-4365-8854-d4633be82d41","Type":"ContainerDied","Data":"440687be371cb9769aabb2391d2f540232251aae0daef55714f1dd17208145cd"} Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.381597 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="440687be371cb9769aabb2391d2f540232251aae0daef55714f1dd17208145cd" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.381671 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5861-account-create-update-8fgpf" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443779 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhwr7\" (UniqueName: \"kubernetes.io/projected/b157515a-ef65-4c3c-9eb1-b015cf54a845-kube-api-access-jhwr7\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443823 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hchv9\" (UniqueName: \"kubernetes.io/projected/ee5b5017-e8e0-40e3-b535-188b1443458e-kube-api-access-hchv9\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443840 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/732f4945-296c-4365-8854-d4633be82d41-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443852 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svpzh\" (UniqueName: \"kubernetes.io/projected/233822fd-9e0a-4c0c-8591-0fce2284f28c-kube-api-access-svpzh\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443867 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b157515a-ef65-4c3c-9eb1-b015cf54a845-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443878 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/118439ce-dfb4-462c-91f5-c989b2f82f1b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443891 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z59qh\" (UniqueName: \"kubernetes.io/projected/732f4945-296c-4365-8854-d4633be82d41-kube-api-access-z59qh\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443902 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbp5s\" (UniqueName: \"kubernetes.io/projected/118439ce-dfb4-462c-91f5-c989b2f82f1b-kube-api-access-rbp5s\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443914 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233822fd-9e0a-4c0c-8591-0fce2284f28c-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:54 crc kubenswrapper[4835]: I0202 17:09:54.443924 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee5b5017-e8e0-40e3-b535-188b1443458e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:56 crc kubenswrapper[4835]: I0202 17:09:56.399150 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerStarted","Data":"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8"} Feb 02 17:09:56 crc kubenswrapper[4835]: I0202 17:09:56.400053 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:09:56 crc kubenswrapper[4835]: I0202 17:09:56.400391 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" 
containerName="proxy-httpd" containerID="cri-o://1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8" gracePeriod=30 Feb 02 17:09:56 crc kubenswrapper[4835]: I0202 17:09:56.400556 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="sg-core" containerID="cri-o://52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094" gracePeriod=30 Feb 02 17:09:56 crc kubenswrapper[4835]: I0202 17:09:56.400599 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="ceilometer-notification-agent" containerID="cri-o://a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4" gracePeriod=30 Feb 02 17:09:56 crc kubenswrapper[4835]: I0202 17:09:56.400633 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="ceilometer-central-agent" containerID="cri-o://077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3" gracePeriod=30 Feb 02 17:09:56 crc kubenswrapper[4835]: I0202 17:09:56.430153 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.047149051 podStartE2EDuration="6.430135897s" podCreationTimestamp="2026-02-02 17:09:50 +0000 UTC" firstStartedPulling="2026-02-02 17:09:51.586807774 +0000 UTC m=+1183.208411854" lastFinishedPulling="2026-02-02 17:09:55.96979462 +0000 UTC m=+1187.591398700" observedRunningTime="2026-02-02 17:09:56.421600186 +0000 UTC m=+1188.043204266" watchObservedRunningTime="2026-02-02 17:09:56.430135897 +0000 UTC m=+1188.051739977" Feb 02 17:09:56 crc kubenswrapper[4835]: E0202 17:09:56.571145 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51198eb2_0d86_405e_b7f6_aa3079520932.slice/crio-conmon-52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094.scope\": RecentStats: unable to find data in memory cache]" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.172796 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.201143 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msm9g\" (UniqueName: \"kubernetes.io/projected/51198eb2-0d86-405e-b7f6-aa3079520932-kube-api-access-msm9g\") pod \"51198eb2-0d86-405e-b7f6-aa3079520932\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.201220 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-sg-core-conf-yaml\") pod \"51198eb2-0d86-405e-b7f6-aa3079520932\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.201798 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-scripts\") pod \"51198eb2-0d86-405e-b7f6-aa3079520932\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.201867 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-log-httpd\") pod \"51198eb2-0d86-405e-b7f6-aa3079520932\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.201920 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-config-data\") pod \"51198eb2-0d86-405e-b7f6-aa3079520932\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.201950 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-run-httpd\") pod \"51198eb2-0d86-405e-b7f6-aa3079520932\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.202012 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-combined-ca-bundle\") pod \"51198eb2-0d86-405e-b7f6-aa3079520932\" (UID: \"51198eb2-0d86-405e-b7f6-aa3079520932\") " Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.202366 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "51198eb2-0d86-405e-b7f6-aa3079520932" (UID: "51198eb2-0d86-405e-b7f6-aa3079520932"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.202663 4835 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.203676 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "51198eb2-0d86-405e-b7f6-aa3079520932" (UID: "51198eb2-0d86-405e-b7f6-aa3079520932"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.211736 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-scripts" (OuterVolumeSpecName: "scripts") pod "51198eb2-0d86-405e-b7f6-aa3079520932" (UID: "51198eb2-0d86-405e-b7f6-aa3079520932"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.211855 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51198eb2-0d86-405e-b7f6-aa3079520932-kube-api-access-msm9g" (OuterVolumeSpecName: "kube-api-access-msm9g") pod "51198eb2-0d86-405e-b7f6-aa3079520932" (UID: "51198eb2-0d86-405e-b7f6-aa3079520932"). InnerVolumeSpecName "kube-api-access-msm9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.233501 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "51198eb2-0d86-405e-b7f6-aa3079520932" (UID: "51198eb2-0d86-405e-b7f6-aa3079520932"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.303747 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.303782 4835 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/51198eb2-0d86-405e-b7f6-aa3079520932-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.303790 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msm9g\" (UniqueName: \"kubernetes.io/projected/51198eb2-0d86-405e-b7f6-aa3079520932-kube-api-access-msm9g\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.303801 4835 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.308205 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51198eb2-0d86-405e-b7f6-aa3079520932" (UID: "51198eb2-0d86-405e-b7f6-aa3079520932"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.327365 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-config-data" (OuterVolumeSpecName: "config-data") pod "51198eb2-0d86-405e-b7f6-aa3079520932" (UID: "51198eb2-0d86-405e-b7f6-aa3079520932"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.406313 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.406389 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51198eb2-0d86-405e-b7f6-aa3079520932-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417839 4835 generic.go:334] "Generic (PLEG): container finished" podID="51198eb2-0d86-405e-b7f6-aa3079520932" containerID="1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8" exitCode=0 Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417873 4835 generic.go:334] "Generic (PLEG): container finished" podID="51198eb2-0d86-405e-b7f6-aa3079520932" containerID="52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094" exitCode=2 Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417883 4835 generic.go:334] "Generic (PLEG): container finished" podID="51198eb2-0d86-405e-b7f6-aa3079520932" containerID="a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4" exitCode=0 Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417891 4835 generic.go:334] "Generic (PLEG): container finished" podID="51198eb2-0d86-405e-b7f6-aa3079520932" containerID="077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3" exitCode=0 Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417909 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerDied","Data":"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8"} Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417934 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerDied","Data":"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094"} Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417944 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerDied","Data":"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4"} Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417952 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerDied","Data":"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3"} Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417961 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"51198eb2-0d86-405e-b7f6-aa3079520932","Type":"ContainerDied","Data":"876c745019c9211219c26b4135132242987741ae934904c79a0c92d02c42e8ad"} Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.417975 4835 scope.go:117] "RemoveContainer" containerID="1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.418090 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.452316 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.454457 4835 scope.go:117] "RemoveContainer" containerID="52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.461262 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.479534 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.479876 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="proxy-httpd" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.479897 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="proxy-httpd" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.479907 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="233822fd-9e0a-4c0c-8591-0fce2284f28c" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.479915 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="233822fd-9e0a-4c0c-8591-0fce2284f28c" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.479940 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b157515a-ef65-4c3c-9eb1-b015cf54a845" containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.479949 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b157515a-ef65-4c3c-9eb1-b015cf54a845" containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.479963 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="ceilometer-notification-agent" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.479969 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="ceilometer-notification-agent" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.479978 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="732f4945-296c-4365-8854-d4633be82d41" containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.479984 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="732f4945-296c-4365-8854-d4633be82d41" containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.479992 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a836f890-a488-4781-bafc-1e8a3b91f0a7" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480000 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a836f890-a488-4781-bafc-1e8a3b91f0a7" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.480012 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee5b5017-e8e0-40e3-b535-188b1443458e" containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480018 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee5b5017-e8e0-40e3-b535-188b1443458e" 
containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.480031 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="ceilometer-central-agent" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480037 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="ceilometer-central-agent" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.480056 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="sg-core" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480061 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="sg-core" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.480071 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="118439ce-dfb4-462c-91f5-c989b2f82f1b" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480077 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="118439ce-dfb4-462c-91f5-c989b2f82f1b" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480234 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a836f890-a488-4781-bafc-1e8a3b91f0a7" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480247 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="233822fd-9e0a-4c0c-8591-0fce2284f28c" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480257 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="ceilometer-central-agent" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480265 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="732f4945-296c-4365-8854-d4633be82d41" containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480295 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="118439ce-dfb4-462c-91f5-c989b2f82f1b" containerName="mariadb-database-create" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480305 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="ceilometer-notification-agent" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480315 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee5b5017-e8e0-40e3-b535-188b1443458e" containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480326 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b157515a-ef65-4c3c-9eb1-b015cf54a845" containerName="mariadb-account-create-update" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480336 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="sg-core" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.480343 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" containerName="proxy-httpd" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.481879 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.486657 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.486824 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.495486 4835 scope.go:117] "RemoveContainer" containerID="a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.496299 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.508009 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmsq4\" (UniqueName: \"kubernetes.io/projected/a665b725-bd0b-4696-ba4f-906bda66b530-kube-api-access-vmsq4\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.508056 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-config-data\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.508104 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-log-httpd\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.508139 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.508197 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-scripts\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.508226 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-run-httpd\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.508244 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.524843 4835 scope.go:117] "RemoveContainer" containerID="077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 
17:09:57.541742 4835 scope.go:117] "RemoveContainer" containerID="1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.542119 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": container with ID starting with 1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8 not found: ID does not exist" containerID="1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.542159 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8"} err="failed to get container status \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": rpc error: code = NotFound desc = could not find container \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": container with ID starting with 1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.542200 4835 scope.go:117] "RemoveContainer" containerID="52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.542634 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": container with ID starting with 52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094 not found: ID does not exist" containerID="52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.542691 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094"} err="failed to get container status \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": rpc error: code = NotFound desc = could not find container \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": container with ID starting with 52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.542721 4835 scope.go:117] "RemoveContainer" containerID="a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.543017 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": container with ID starting with a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4 not found: ID does not exist" containerID="a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.543047 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4"} err="failed to get container status \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": rpc error: code = NotFound desc = could not find container \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": container with ID 
starting with a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.543069 4835 scope.go:117] "RemoveContainer" containerID="077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3" Feb 02 17:09:57 crc kubenswrapper[4835]: E0202 17:09:57.543670 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": container with ID starting with 077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3 not found: ID does not exist" containerID="077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.543704 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3"} err="failed to get container status \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": rpc error: code = NotFound desc = could not find container \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": container with ID starting with 077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.543727 4835 scope.go:117] "RemoveContainer" containerID="1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.544021 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8"} err="failed to get container status \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": rpc error: code = NotFound desc = could not find container \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": container with ID starting with 1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.544052 4835 scope.go:117] "RemoveContainer" containerID="52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.544299 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094"} err="failed to get container status \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": rpc error: code = NotFound desc = could not find container \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": container with ID starting with 52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.544326 4835 scope.go:117] "RemoveContainer" containerID="a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.544582 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4"} err="failed to get container status \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": rpc error: code = NotFound desc = could not find container \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": container with ID 
starting with a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.544605 4835 scope.go:117] "RemoveContainer" containerID="077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.544864 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3"} err="failed to get container status \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": rpc error: code = NotFound desc = could not find container \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": container with ID starting with 077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.544890 4835 scope.go:117] "RemoveContainer" containerID="1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.545158 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8"} err="failed to get container status \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": rpc error: code = NotFound desc = could not find container \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": container with ID starting with 1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.545185 4835 scope.go:117] "RemoveContainer" containerID="52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.545513 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094"} err="failed to get container status \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": rpc error: code = NotFound desc = could not find container \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": container with ID starting with 52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.545539 4835 scope.go:117] "RemoveContainer" containerID="a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.545779 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4"} err="failed to get container status \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": rpc error: code = NotFound desc = could not find container \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": container with ID starting with a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.545802 4835 scope.go:117] "RemoveContainer" containerID="077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.546007 4835 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3"} err="failed to get container status \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": rpc error: code = NotFound desc = could not find container \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": container with ID starting with 077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.546026 4835 scope.go:117] "RemoveContainer" containerID="1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.546221 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8"} err="failed to get container status \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": rpc error: code = NotFound desc = could not find container \"1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8\": container with ID starting with 1d4d116e0869426f142e5b692acca48d3427eae377f404a75d670c4dcb9fb8f8 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.546247 4835 scope.go:117] "RemoveContainer" containerID="52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.546522 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094"} err="failed to get container status \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": rpc error: code = NotFound desc = could not find container \"52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094\": container with ID starting with 52e7c2fdfd33a4162032eb30c2c66c694f50f79657ceac826a61f2843b0ee094 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.546552 4835 scope.go:117] "RemoveContainer" containerID="a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.546819 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4"} err="failed to get container status \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": rpc error: code = NotFound desc = could not find container \"a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4\": container with ID starting with a59de92043b4ebbc994c2669e18d9740fbdac4ab149f42c9660b2bd9b79c47c4 not found: ID does not exist" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.546849 4835 scope.go:117] "RemoveContainer" containerID="077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.547075 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3"} err="failed to get container status \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": rpc error: code = NotFound desc = could not find container \"077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3\": container with ID starting with 077200a85c33ed5718d2ef78886f017996b0f446eefdebabca05a6ca016036d3 not found: ID does not exist" Feb 
02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.609812 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-scripts\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.609873 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-run-httpd\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.609894 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.609985 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmsq4\" (UniqueName: \"kubernetes.io/projected/a665b725-bd0b-4696-ba4f-906bda66b530-kube-api-access-vmsq4\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.610010 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-config-data\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.610054 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-log-httpd\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.610091 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.614469 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.614805 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-run-httpd\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.615479 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-scripts\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 
17:09:57.615803 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-log-httpd\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.616084 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.619115 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-config-data\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.633557 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmsq4\" (UniqueName: \"kubernetes.io/projected/a665b725-bd0b-4696-ba4f-906bda66b530-kube-api-access-vmsq4\") pod \"ceilometer-0\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " pod="openstack/ceilometer-0" Feb 02 17:09:57 crc kubenswrapper[4835]: I0202 17:09:57.799106 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:09:58 crc kubenswrapper[4835]: I0202 17:09:58.268891 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:09:58 crc kubenswrapper[4835]: I0202 17:09:58.451992 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerStarted","Data":"220007cbc9844674ed64c177be37ef17ddb0726aba9b35a02c9bcb3708068388"} Feb 02 17:09:59 crc kubenswrapper[4835]: I0202 17:09:59.200977 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51198eb2-0d86-405e-b7f6-aa3079520932" path="/var/lib/kubelet/pods/51198eb2-0d86-405e-b7f6-aa3079520932/volumes" Feb 02 17:09:59 crc kubenswrapper[4835]: I0202 17:09:59.462577 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerStarted","Data":"5bb99f9ba958054b24a6714bd6caf748099c12ce19844f05e3a14514fc631440"} Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.373651 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5fts"] Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.374946 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.378070 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.378096 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-czxqn" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.378444 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.391634 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5fts"] Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.461491 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-scripts\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.461583 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-config-data\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.461621 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cc9b\" (UniqueName: \"kubernetes.io/projected/50e803e5-2a0b-4a50-8644-fa079b131ee5-kube-api-access-6cc9b\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.461638 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.487922 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerStarted","Data":"4bf7547d0a6912f36e1df49d8a5234038a35d2c79c6fe40e29aefd0b46ee48f1"} Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.563018 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-config-data\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.563083 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cc9b\" (UniqueName: \"kubernetes.io/projected/50e803e5-2a0b-4a50-8644-fa079b131ee5-kube-api-access-6cc9b\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 
17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.563099 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.563183 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-scripts\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.567576 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.568834 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-scripts\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.568881 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-config-data\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.582236 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cc9b\" (UniqueName: \"kubernetes.io/projected/50e803e5-2a0b-4a50-8644-fa079b131ee5-kube-api-access-6cc9b\") pod \"nova-cell0-conductor-db-sync-l5fts\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:00 crc kubenswrapper[4835]: I0202 17:10:00.691313 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:01 crc kubenswrapper[4835]: I0202 17:10:01.181712 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5fts"] Feb 02 17:10:01 crc kubenswrapper[4835]: W0202 17:10:01.191885 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50e803e5_2a0b_4a50_8644_fa079b131ee5.slice/crio-1096404d5a82b18276ec5f5e4e303c995295b36086a35f4d3c670185924e9fd7 WatchSource:0}: Error finding container 1096404d5a82b18276ec5f5e4e303c995295b36086a35f4d3c670185924e9fd7: Status 404 returned error can't find the container with id 1096404d5a82b18276ec5f5e4e303c995295b36086a35f4d3c670185924e9fd7 Feb 02 17:10:01 crc kubenswrapper[4835]: I0202 17:10:01.438699 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-66f46bdd4f-5p4b9" Feb 02 17:10:01 crc kubenswrapper[4835]: I0202 17:10:01.516973 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5958d6c764-v628z"] Feb 02 17:10:01 crc kubenswrapper[4835]: I0202 17:10:01.517209 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5958d6c764-v628z" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerName="neutron-api" containerID="cri-o://6b21687bcdf10a9f8ea62b1d1352dc16b88506d6b7d32864c96a6538d1d3678f" gracePeriod=30 Feb 02 17:10:01 crc kubenswrapper[4835]: I0202 17:10:01.517617 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5958d6c764-v628z" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerName="neutron-httpd" containerID="cri-o://589b6742a0cb8137f5eb8e4a1452ff1c498c2b58e3763c1dbe00f0cf2b690c8e" gracePeriod=30 Feb 02 17:10:01 crc kubenswrapper[4835]: I0202 17:10:01.524041 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerStarted","Data":"514a86009962452b49fc671601aa11dedacd171f7a45fb025da8e04bbda07222"} Feb 02 17:10:01 crc kubenswrapper[4835]: I0202 17:10:01.527717 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l5fts" event={"ID":"50e803e5-2a0b-4a50-8644-fa079b131ee5","Type":"ContainerStarted","Data":"1096404d5a82b18276ec5f5e4e303c995295b36086a35f4d3c670185924e9fd7"} Feb 02 17:10:02 crc kubenswrapper[4835]: I0202 17:10:02.539636 4835 generic.go:334] "Generic (PLEG): container finished" podID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerID="589b6742a0cb8137f5eb8e4a1452ff1c498c2b58e3763c1dbe00f0cf2b690c8e" exitCode=0 Feb 02 17:10:02 crc kubenswrapper[4835]: I0202 17:10:02.539727 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5958d6c764-v628z" event={"ID":"30738051-1c87-4817-9ebf-7cdf056c4a2f","Type":"ContainerDied","Data":"589b6742a0cb8137f5eb8e4a1452ff1c498c2b58e3763c1dbe00f0cf2b690c8e"} Feb 02 17:10:03 crc kubenswrapper[4835]: I0202 17:10:03.554302 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerStarted","Data":"f6afd2fa0d84d77ff002f354f1445b6ea03b17860be5bba985a7af00a3f58e91"} Feb 02 17:10:03 crc kubenswrapper[4835]: I0202 17:10:03.554752 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:10:03 crc kubenswrapper[4835]: I0202 17:10:03.613643 4835 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.991432823 podStartE2EDuration="6.613620323s" podCreationTimestamp="2026-02-02 17:09:57 +0000 UTC" firstStartedPulling="2026-02-02 17:09:58.269521958 +0000 UTC m=+1189.891126068" lastFinishedPulling="2026-02-02 17:10:02.891709488 +0000 UTC m=+1194.513313568" observedRunningTime="2026-02-02 17:10:03.605743319 +0000 UTC m=+1195.227347409" watchObservedRunningTime="2026-02-02 17:10:03.613620323 +0000 UTC m=+1195.235224403" Feb 02 17:10:06 crc kubenswrapper[4835]: I0202 17:10:06.157225 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:10:06 crc kubenswrapper[4835]: I0202 17:10:06.311388 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-b8f544fd4-zp9bk" Feb 02 17:10:06 crc kubenswrapper[4835]: I0202 17:10:06.385690 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8b8949f68-7cjhk"] Feb 02 17:10:06 crc kubenswrapper[4835]: I0202 17:10:06.385960 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-8b8949f68-7cjhk" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerName="placement-log" containerID="cri-o://a27adc2355489323da3b7415a4f32584902e7314cd17c2bd21a4576a55b5e317" gracePeriod=30 Feb 02 17:10:06 crc kubenswrapper[4835]: I0202 17:10:06.386424 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-8b8949f68-7cjhk" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerName="placement-api" containerID="cri-o://bf026e2f58b5c048bc4a83a5c3443dbb83c7388425a0dc6535e30501cd629092" gracePeriod=30 Feb 02 17:10:06 crc kubenswrapper[4835]: I0202 17:10:06.622922 4835 generic.go:334] "Generic (PLEG): container finished" podID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerID="a27adc2355489323da3b7415a4f32584902e7314cd17c2bd21a4576a55b5e317" exitCode=143 Feb 02 17:10:06 crc kubenswrapper[4835]: I0202 17:10:06.624039 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b8949f68-7cjhk" event={"ID":"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1","Type":"ContainerDied","Data":"a27adc2355489323da3b7415a4f32584902e7314cd17c2bd21a4576a55b5e317"} Feb 02 17:10:07 crc kubenswrapper[4835]: I0202 17:10:07.635711 4835 generic.go:334] "Generic (PLEG): container finished" podID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerID="6b21687bcdf10a9f8ea62b1d1352dc16b88506d6b7d32864c96a6538d1d3678f" exitCode=0 Feb 02 17:10:07 crc kubenswrapper[4835]: I0202 17:10:07.635781 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5958d6c764-v628z" event={"ID":"30738051-1c87-4817-9ebf-7cdf056c4a2f","Type":"ContainerDied","Data":"6b21687bcdf10a9f8ea62b1d1352dc16b88506d6b7d32864c96a6538d1d3678f"} Feb 02 17:10:08 crc kubenswrapper[4835]: I0202 17:10:08.636967 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:08 crc kubenswrapper[4835]: I0202 17:10:08.637571 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="ceilometer-central-agent" containerID="cri-o://5bb99f9ba958054b24a6714bd6caf748099c12ce19844f05e3a14514fc631440" gracePeriod=30 Feb 02 17:10:08 crc kubenswrapper[4835]: I0202 17:10:08.637649 4835 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="proxy-httpd" containerID="cri-o://f6afd2fa0d84d77ff002f354f1445b6ea03b17860be5bba985a7af00a3f58e91" gracePeriod=30 Feb 02 17:10:08 crc kubenswrapper[4835]: I0202 17:10:08.637660 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="sg-core" containerID="cri-o://514a86009962452b49fc671601aa11dedacd171f7a45fb025da8e04bbda07222" gracePeriod=30 Feb 02 17:10:08 crc kubenswrapper[4835]: I0202 17:10:08.637711 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="ceilometer-notification-agent" containerID="cri-o://4bf7547d0a6912f36e1df49d8a5234038a35d2c79c6fe40e29aefd0b46ee48f1" gracePeriod=30 Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.668964 4835 generic.go:334] "Generic (PLEG): container finished" podID="a665b725-bd0b-4696-ba4f-906bda66b530" containerID="f6afd2fa0d84d77ff002f354f1445b6ea03b17860be5bba985a7af00a3f58e91" exitCode=0 Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.669263 4835 generic.go:334] "Generic (PLEG): container finished" podID="a665b725-bd0b-4696-ba4f-906bda66b530" containerID="514a86009962452b49fc671601aa11dedacd171f7a45fb025da8e04bbda07222" exitCode=2 Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.669294 4835 generic.go:334] "Generic (PLEG): container finished" podID="a665b725-bd0b-4696-ba4f-906bda66b530" containerID="4bf7547d0a6912f36e1df49d8a5234038a35d2c79c6fe40e29aefd0b46ee48f1" exitCode=0 Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.669305 4835 generic.go:334] "Generic (PLEG): container finished" podID="a665b725-bd0b-4696-ba4f-906bda66b530" containerID="5bb99f9ba958054b24a6714bd6caf748099c12ce19844f05e3a14514fc631440" exitCode=0 Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.669130 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerDied","Data":"f6afd2fa0d84d77ff002f354f1445b6ea03b17860be5bba985a7af00a3f58e91"} Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.669360 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerDied","Data":"514a86009962452b49fc671601aa11dedacd171f7a45fb025da8e04bbda07222"} Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.669370 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerDied","Data":"4bf7547d0a6912f36e1df49d8a5234038a35d2c79c6fe40e29aefd0b46ee48f1"} Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.669381 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerDied","Data":"5bb99f9ba958054b24a6714bd6caf748099c12ce19844f05e3a14514fc631440"} Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.677445 4835 generic.go:334] "Generic (PLEG): container finished" podID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerID="bf026e2f58b5c048bc4a83a5c3443dbb83c7388425a0dc6535e30501cd629092" exitCode=0 Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.677485 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b8949f68-7cjhk" 
event={"ID":"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1","Type":"ContainerDied","Data":"bf026e2f58b5c048bc4a83a5c3443dbb83c7388425a0dc6535e30501cd629092"} Feb 02 17:10:09 crc kubenswrapper[4835]: I0202 17:10:09.922360 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.040221 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-httpd-config\") pod \"30738051-1c87-4817-9ebf-7cdf056c4a2f\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.040625 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-combined-ca-bundle\") pod \"30738051-1c87-4817-9ebf-7cdf056c4a2f\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.040682 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-ovndb-tls-certs\") pod \"30738051-1c87-4817-9ebf-7cdf056c4a2f\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.040710 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-config\") pod \"30738051-1c87-4817-9ebf-7cdf056c4a2f\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.040750 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p27g\" (UniqueName: \"kubernetes.io/projected/30738051-1c87-4817-9ebf-7cdf056c4a2f-kube-api-access-5p27g\") pod \"30738051-1c87-4817-9ebf-7cdf056c4a2f\" (UID: \"30738051-1c87-4817-9ebf-7cdf056c4a2f\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.056717 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "30738051-1c87-4817-9ebf-7cdf056c4a2f" (UID: "30738051-1c87-4817-9ebf-7cdf056c4a2f"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.059695 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30738051-1c87-4817-9ebf-7cdf056c4a2f-kube-api-access-5p27g" (OuterVolumeSpecName: "kube-api-access-5p27g") pod "30738051-1c87-4817-9ebf-7cdf056c4a2f" (UID: "30738051-1c87-4817-9ebf-7cdf056c4a2f"). InnerVolumeSpecName "kube-api-access-5p27g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.133535 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-config" (OuterVolumeSpecName: "config") pod "30738051-1c87-4817-9ebf-7cdf056c4a2f" (UID: "30738051-1c87-4817-9ebf-7cdf056c4a2f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.142737 4835 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.142775 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.142788 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p27g\" (UniqueName: \"kubernetes.io/projected/30738051-1c87-4817-9ebf-7cdf056c4a2f-kube-api-access-5p27g\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.144215 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "30738051-1c87-4817-9ebf-7cdf056c4a2f" (UID: "30738051-1c87-4817-9ebf-7cdf056c4a2f"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.158303 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30738051-1c87-4817-9ebf-7cdf056c4a2f" (UID: "30738051-1c87-4817-9ebf-7cdf056c4a2f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.160323 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.174902 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.244825 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.244867 4835 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/30738051-1c87-4817-9ebf-7cdf056c4a2f-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346083 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-config-data\") pod \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346184 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-combined-ca-bundle\") pod \"a665b725-bd0b-4696-ba4f-906bda66b530\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346222 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfxt8\" (UniqueName: \"kubernetes.io/projected/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-kube-api-access-xfxt8\") pod \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346362 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-combined-ca-bundle\") pod \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346412 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-sg-core-conf-yaml\") pod \"a665b725-bd0b-4696-ba4f-906bda66b530\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346437 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-scripts\") pod \"a665b725-bd0b-4696-ba4f-906bda66b530\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346459 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmsq4\" (UniqueName: \"kubernetes.io/projected/a665b725-bd0b-4696-ba4f-906bda66b530-kube-api-access-vmsq4\") pod \"a665b725-bd0b-4696-ba4f-906bda66b530\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346528 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-logs\") pod \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346566 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-internal-tls-certs\") pod \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346589 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-log-httpd\") pod \"a665b725-bd0b-4696-ba4f-906bda66b530\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346632 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-scripts\") pod \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346669 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-public-tls-certs\") pod \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\" (UID: \"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346703 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-run-httpd\") pod \"a665b725-bd0b-4696-ba4f-906bda66b530\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.346748 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-config-data\") pod \"a665b725-bd0b-4696-ba4f-906bda66b530\" (UID: \"a665b725-bd0b-4696-ba4f-906bda66b530\") " Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.348934 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-logs" (OuterVolumeSpecName: "logs") pod "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" (UID: "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.349293 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a665b725-bd0b-4696-ba4f-906bda66b530" (UID: "a665b725-bd0b-4696-ba4f-906bda66b530"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.349542 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a665b725-bd0b-4696-ba4f-906bda66b530" (UID: "a665b725-bd0b-4696-ba4f-906bda66b530"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.351224 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-scripts" (OuterVolumeSpecName: "scripts") pod "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" (UID: "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.352541 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-kube-api-access-xfxt8" (OuterVolumeSpecName: "kube-api-access-xfxt8") pod "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" (UID: "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1"). InnerVolumeSpecName "kube-api-access-xfxt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.352638 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a665b725-bd0b-4696-ba4f-906bda66b530-kube-api-access-vmsq4" (OuterVolumeSpecName: "kube-api-access-vmsq4") pod "a665b725-bd0b-4696-ba4f-906bda66b530" (UID: "a665b725-bd0b-4696-ba4f-906bda66b530"). InnerVolumeSpecName "kube-api-access-vmsq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.358737 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-scripts" (OuterVolumeSpecName: "scripts") pod "a665b725-bd0b-4696-ba4f-906bda66b530" (UID: "a665b725-bd0b-4696-ba4f-906bda66b530"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.378845 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a665b725-bd0b-4696-ba4f-906bda66b530" (UID: "a665b725-bd0b-4696-ba4f-906bda66b530"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.401011 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-config-data" (OuterVolumeSpecName: "config-data") pod "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" (UID: "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.426668 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a665b725-bd0b-4696-ba4f-906bda66b530" (UID: "a665b725-bd0b-4696-ba4f-906bda66b530"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.430563 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" (UID: "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451238 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451284 4835 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451298 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451307 4835 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a665b725-bd0b-4696-ba4f-906bda66b530-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451317 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451329 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451342 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfxt8\" (UniqueName: \"kubernetes.io/projected/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-kube-api-access-xfxt8\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451352 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451361 4835 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451370 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.451379 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmsq4\" (UniqueName: \"kubernetes.io/projected/a665b725-bd0b-4696-ba4f-906bda66b530-kube-api-access-vmsq4\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.471376 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" (UID: "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.483570 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" (UID: "a66823b3-c9eb-44bd-a7cc-56c1b5f780c1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.512009 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-config-data" (OuterVolumeSpecName: "config-data") pod "a665b725-bd0b-4696-ba4f-906bda66b530" (UID: "a665b725-bd0b-4696-ba4f-906bda66b530"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.552540 4835 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.552589 4835 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.552604 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a665b725-bd0b-4696-ba4f-906bda66b530-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.687711 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5958d6c764-v628z" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.687709 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5958d6c764-v628z" event={"ID":"30738051-1c87-4817-9ebf-7cdf056c4a2f","Type":"ContainerDied","Data":"7c700c232dd6dc037a5d8765bff71a773ab274d52536fa93d3b20118c31c8bf0"} Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.687848 4835 scope.go:117] "RemoveContainer" containerID="589b6742a0cb8137f5eb8e4a1452ff1c498c2b58e3763c1dbe00f0cf2b690c8e" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.690485 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a665b725-bd0b-4696-ba4f-906bda66b530","Type":"ContainerDied","Data":"220007cbc9844674ed64c177be37ef17ddb0726aba9b35a02c9bcb3708068388"} Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.690535 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.692598 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l5fts" event={"ID":"50e803e5-2a0b-4a50-8644-fa079b131ee5","Type":"ContainerStarted","Data":"fb30dfe4f380c1a85fdce7452c1c6efad60727f79f3d6b27b8d5404fb145351d"} Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.699759 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8b8949f68-7cjhk" event={"ID":"a66823b3-c9eb-44bd-a7cc-56c1b5f780c1","Type":"ContainerDied","Data":"9dbf5fb258595105f66d61759b0724dba175cca3e2708733be95c5cf180e92a1"} Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.699852 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8b8949f68-7cjhk" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.711141 4835 scope.go:117] "RemoveContainer" containerID="6b21687bcdf10a9f8ea62b1d1352dc16b88506d6b7d32864c96a6538d1d3678f" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.730491 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-l5fts" podStartSLOduration=2.052010357 podStartE2EDuration="10.730471561s" podCreationTimestamp="2026-02-02 17:10:00 +0000 UTC" firstStartedPulling="2026-02-02 17:10:01.194205774 +0000 UTC m=+1192.815809844" lastFinishedPulling="2026-02-02 17:10:09.872666968 +0000 UTC m=+1201.494271048" observedRunningTime="2026-02-02 17:10:10.728410922 +0000 UTC m=+1202.350014992" watchObservedRunningTime="2026-02-02 17:10:10.730471561 +0000 UTC m=+1202.352075641" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.734042 4835 scope.go:117] "RemoveContainer" containerID="f6afd2fa0d84d77ff002f354f1445b6ea03b17860be5bba985a7af00a3f58e91" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.763876 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5958d6c764-v628z"] Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.765742 4835 scope.go:117] "RemoveContainer" containerID="514a86009962452b49fc671601aa11dedacd171f7a45fb025da8e04bbda07222" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.779796 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5958d6c764-v628z"] Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.786137 4835 scope.go:117] "RemoveContainer" containerID="4bf7547d0a6912f36e1df49d8a5234038a35d2c79c6fe40e29aefd0b46ee48f1" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.789405 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8b8949f68-7cjhk"] Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.798569 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-8b8949f68-7cjhk"] Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.812660 4835 scope.go:117] "RemoveContainer" containerID="5bb99f9ba958054b24a6714bd6caf748099c12ce19844f05e3a14514fc631440" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.815848 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.825790 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.835044 4835 scope.go:117] "RemoveContainer" containerID="bf026e2f58b5c048bc4a83a5c3443dbb83c7388425a0dc6535e30501cd629092" Feb 02 17:10:10 crc 
kubenswrapper[4835]: I0202 17:10:10.839921 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:10 crc kubenswrapper[4835]: E0202 17:10:10.840266 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="ceilometer-central-agent" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840305 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="ceilometer-central-agent" Feb 02 17:10:10 crc kubenswrapper[4835]: E0202 17:10:10.840321 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="proxy-httpd" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840327 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="proxy-httpd" Feb 02 17:10:10 crc kubenswrapper[4835]: E0202 17:10:10.840341 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="sg-core" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840347 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="sg-core" Feb 02 17:10:10 crc kubenswrapper[4835]: E0202 17:10:10.840359 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerName="placement-log" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840364 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerName="placement-log" Feb 02 17:10:10 crc kubenswrapper[4835]: E0202 17:10:10.840373 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerName="neutron-api" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840379 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerName="neutron-api" Feb 02 17:10:10 crc kubenswrapper[4835]: E0202 17:10:10.840396 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="ceilometer-notification-agent" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840402 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="ceilometer-notification-agent" Feb 02 17:10:10 crc kubenswrapper[4835]: E0202 17:10:10.840410 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerName="neutron-httpd" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840416 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerName="neutron-httpd" Feb 02 17:10:10 crc kubenswrapper[4835]: E0202 17:10:10.840431 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerName="placement-api" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840437 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerName="placement-api" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840599 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerName="neutron-api" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 
17:10:10.840612 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerName="placement-log" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840618 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" containerName="neutron-httpd" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840631 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" containerName="placement-api" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840640 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="ceilometer-central-agent" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840650 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="ceilometer-notification-agent" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840662 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="proxy-httpd" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.840669 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" containerName="sg-core" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.842294 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.853954 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.853984 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.857434 4835 scope.go:117] "RemoveContainer" containerID="a27adc2355489323da3b7415a4f32584902e7314cd17c2bd21a4576a55b5e317" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.863618 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.959727 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-config-data\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.960035 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-log-httpd\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.960081 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.960114 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-scripts\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.960234 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.960301 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-run-httpd\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:10 crc kubenswrapper[4835]: I0202 17:10:10.960338 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p78kc\" (UniqueName: \"kubernetes.io/projected/123189a5-58c2-406b-877a-e39bba484f73-kube-api-access-p78kc\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.061870 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.061916 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-run-httpd\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.061940 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p78kc\" (UniqueName: \"kubernetes.io/projected/123189a5-58c2-406b-877a-e39bba484f73-kube-api-access-p78kc\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.062006 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-config-data\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.062105 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-log-httpd\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.062127 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.062146 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-scripts\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.062841 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-run-httpd\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.063076 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-log-httpd\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.066161 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.068653 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.069579 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-config-data\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.070804 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-scripts\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.087196 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p78kc\" (UniqueName: \"kubernetes.io/projected/123189a5-58c2-406b-877a-e39bba484f73-kube-api-access-p78kc\") pod \"ceilometer-0\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.165925 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.199424 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30738051-1c87-4817-9ebf-7cdf056c4a2f" path="/var/lib/kubelet/pods/30738051-1c87-4817-9ebf-7cdf056c4a2f/volumes" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.200135 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a665b725-bd0b-4696-ba4f-906bda66b530" path="/var/lib/kubelet/pods/a665b725-bd0b-4696-ba4f-906bda66b530/volumes" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.201117 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a66823b3-c9eb-44bd-a7cc-56c1b5f780c1" path="/var/lib/kubelet/pods/a66823b3-c9eb-44bd-a7cc-56c1b5f780c1/volumes" Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.607978 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:11 crc kubenswrapper[4835]: W0202 17:10:11.619578 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod123189a5_58c2_406b_877a_e39bba484f73.slice/crio-3efe3935b862befc810fb9450de95761aef52c6070a24698792b753bb18ad7fa WatchSource:0}: Error finding container 3efe3935b862befc810fb9450de95761aef52c6070a24698792b753bb18ad7fa: Status 404 returned error can't find the container with id 3efe3935b862befc810fb9450de95761aef52c6070a24698792b753bb18ad7fa Feb 02 17:10:11 crc kubenswrapper[4835]: I0202 17:10:11.710063 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerStarted","Data":"3efe3935b862befc810fb9450de95761aef52c6070a24698792b753bb18ad7fa"} Feb 02 17:10:13 crc kubenswrapper[4835]: I0202 17:10:13.727743 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerStarted","Data":"f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27"} Feb 02 17:10:13 crc kubenswrapper[4835]: I0202 17:10:13.728319 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerStarted","Data":"3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45"} Feb 02 17:10:14 crc kubenswrapper[4835]: I0202 17:10:14.741469 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerStarted","Data":"5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02"} Feb 02 17:10:14 crc kubenswrapper[4835]: I0202 17:10:14.871012 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:10:14 crc kubenswrapper[4835]: I0202 17:10:14.871082 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:10:17 crc kubenswrapper[4835]: I0202 17:10:17.774831 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerStarted","Data":"c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66"} Feb 02 17:10:17 crc kubenswrapper[4835]: I0202 17:10:17.775462 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:10:17 crc kubenswrapper[4835]: I0202 17:10:17.802983 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.812972306 podStartE2EDuration="7.802957282s" podCreationTimestamp="2026-02-02 17:10:10 +0000 UTC" firstStartedPulling="2026-02-02 17:10:11.62313802 +0000 UTC m=+1203.244742100" lastFinishedPulling="2026-02-02 17:10:16.613122996 +0000 UTC m=+1208.234727076" observedRunningTime="2026-02-02 17:10:17.795172472 +0000 UTC m=+1209.416776562" watchObservedRunningTime="2026-02-02 17:10:17.802957282 +0000 UTC m=+1209.424561372" Feb 02 17:10:19 crc kubenswrapper[4835]: I0202 17:10:19.796905 4835 generic.go:334] "Generic (PLEG): container finished" podID="50e803e5-2a0b-4a50-8644-fa079b131ee5" containerID="fb30dfe4f380c1a85fdce7452c1c6efad60727f79f3d6b27b8d5404fb145351d" exitCode=0 Feb 02 17:10:19 crc kubenswrapper[4835]: I0202 17:10:19.797038 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l5fts" event={"ID":"50e803e5-2a0b-4a50-8644-fa079b131ee5","Type":"ContainerDied","Data":"fb30dfe4f380c1a85fdce7452c1c6efad60727f79f3d6b27b8d5404fb145351d"} Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.205484 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.210670 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cc9b\" (UniqueName: \"kubernetes.io/projected/50e803e5-2a0b-4a50-8644-fa079b131ee5-kube-api-access-6cc9b\") pod \"50e803e5-2a0b-4a50-8644-fa079b131ee5\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.210720 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-combined-ca-bundle\") pod \"50e803e5-2a0b-4a50-8644-fa079b131ee5\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.210816 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-scripts\") pod \"50e803e5-2a0b-4a50-8644-fa079b131ee5\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.210852 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-config-data\") pod \"50e803e5-2a0b-4a50-8644-fa079b131ee5\" (UID: \"50e803e5-2a0b-4a50-8644-fa079b131ee5\") " Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.216777 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-scripts" (OuterVolumeSpecName: "scripts") pod "50e803e5-2a0b-4a50-8644-fa079b131ee5" (UID: "50e803e5-2a0b-4a50-8644-fa079b131ee5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.223590 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50e803e5-2a0b-4a50-8644-fa079b131ee5-kube-api-access-6cc9b" (OuterVolumeSpecName: "kube-api-access-6cc9b") pod "50e803e5-2a0b-4a50-8644-fa079b131ee5" (UID: "50e803e5-2a0b-4a50-8644-fa079b131ee5"). InnerVolumeSpecName "kube-api-access-6cc9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.247406 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-config-data" (OuterVolumeSpecName: "config-data") pod "50e803e5-2a0b-4a50-8644-fa079b131ee5" (UID: "50e803e5-2a0b-4a50-8644-fa079b131ee5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.248054 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50e803e5-2a0b-4a50-8644-fa079b131ee5" (UID: "50e803e5-2a0b-4a50-8644-fa079b131ee5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.312203 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.312241 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cc9b\" (UniqueName: \"kubernetes.io/projected/50e803e5-2a0b-4a50-8644-fa079b131ee5-kube-api-access-6cc9b\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.312256 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.312341 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50e803e5-2a0b-4a50-8644-fa079b131ee5-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.821164 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l5fts" event={"ID":"50e803e5-2a0b-4a50-8644-fa079b131ee5","Type":"ContainerDied","Data":"1096404d5a82b18276ec5f5e4e303c995295b36086a35f4d3c670185924e9fd7"} Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.821740 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1096404d5a82b18276ec5f5e4e303c995295b36086a35f4d3c670185924e9fd7" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.821218 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l5fts" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.916254 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 17:10:21 crc kubenswrapper[4835]: E0202 17:10:21.921427 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50e803e5-2a0b-4a50-8644-fa079b131ee5" containerName="nova-cell0-conductor-db-sync" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.921483 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="50e803e5-2a0b-4a50-8644-fa079b131ee5" containerName="nova-cell0-conductor-db-sync" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.921709 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="50e803e5-2a0b-4a50-8644-fa079b131ee5" containerName="nova-cell0-conductor-db-sync" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.922423 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.924478 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-czxqn" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.924621 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 17:10:21 crc kubenswrapper[4835]: I0202 17:10:21.927079 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.023594 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb71347d-bf06-4685-809c-a20715adc072-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.024043 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz8xv\" (UniqueName: \"kubernetes.io/projected/fb71347d-bf06-4685-809c-a20715adc072-kube-api-access-cz8xv\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.024332 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb71347d-bf06-4685-809c-a20715adc072-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.125010 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb71347d-bf06-4685-809c-a20715adc072-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.125074 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb71347d-bf06-4685-809c-a20715adc072-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 
17:10:22.125128 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz8xv\" (UniqueName: \"kubernetes.io/projected/fb71347d-bf06-4685-809c-a20715adc072-kube-api-access-cz8xv\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.133088 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb71347d-bf06-4685-809c-a20715adc072-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.134324 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb71347d-bf06-4685-809c-a20715adc072-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.142582 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz8xv\" (UniqueName: \"kubernetes.io/projected/fb71347d-bf06-4685-809c-a20715adc072-kube-api-access-cz8xv\") pod \"nova-cell0-conductor-0\" (UID: \"fb71347d-bf06-4685-809c-a20715adc072\") " pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.246761 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.671154 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 17:10:22 crc kubenswrapper[4835]: I0202 17:10:22.830983 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fb71347d-bf06-4685-809c-a20715adc072","Type":"ContainerStarted","Data":"1554a901ed14fc94a2bbe45fe8a45aebdf51c0314d3d7a9458347e2c17a8ddd7"} Feb 02 17:10:23 crc kubenswrapper[4835]: I0202 17:10:23.843946 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fb71347d-bf06-4685-809c-a20715adc072","Type":"ContainerStarted","Data":"bc8ef546825a983e0e461c1d1bb0eec01c5a495d2cdf9609ca72a7f7ba82e2d3"} Feb 02 17:10:23 crc kubenswrapper[4835]: I0202 17:10:23.844389 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.280099 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.304653 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=6.304596597 podStartE2EDuration="6.304596597s" podCreationTimestamp="2026-02-02 17:10:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:10:23.866090279 +0000 UTC m=+1215.487694399" watchObservedRunningTime="2026-02-02 17:10:27.304596597 +0000 UTC m=+1218.926200687" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.706531 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-bftbv"] Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.707796 
4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.712877 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.713098 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.714813 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-bftbv"] Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.820764 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.820896 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-scripts\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.820930 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xstx\" (UniqueName: \"kubernetes.io/projected/9faacb79-efaa-411d-9d65-23b6b602b4d2-kube-api-access-6xstx\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.820976 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-config-data\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.833195 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.834372 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.837458 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.844652 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.926308 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-config-data\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.926413 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.926439 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.926504 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkdbg\" (UniqueName: \"kubernetes.io/projected/d4389d06-4543-4757-9a9e-3df501e4b228-kube-api-access-dkdbg\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.926546 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-scripts\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.926576 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xstx\" (UniqueName: \"kubernetes.io/projected/9faacb79-efaa-411d-9d65-23b6b602b4d2-kube-api-access-6xstx\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.926650 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-config-data\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.962228 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-scripts\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.974824 4835 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.974836 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xstx\" (UniqueName: \"kubernetes.io/projected/9faacb79-efaa-411d-9d65-23b6b602b4d2-kube-api-access-6xstx\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.978440 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-config-data\") pod \"nova-cell0-cell-mapping-bftbv\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.996607 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:10:27 crc kubenswrapper[4835]: I0202 17:10:27.997759 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.013877 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.051719 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-config-data\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.051849 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.051947 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkdbg\" (UniqueName: \"kubernetes.io/projected/d4389d06-4543-4757-9a9e-3df501e4b228-kube-api-access-dkdbg\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.063878 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-config-data\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.067086 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.072108 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.075343 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.086142 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.100316 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.119886 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkdbg\" (UniqueName: \"kubernetes.io/projected/d4389d06-4543-4757-9a9e-3df501e4b228-kube-api-access-dkdbg\") pod \"nova-scheduler-0\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.151911 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.153156 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.153333 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.153356 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx92x\" (UniqueName: \"kubernetes.io/projected/645a2937-2612-4a25-a28b-c77bf1453d7f-kube-api-access-jx92x\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.155421 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.183641 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.196074 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.198358 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.201055 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.207586 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.218986 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-7krzf"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.221889 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.227935 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-7krzf"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.257752 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-logs\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.257884 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-config-data\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.257929 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.258561 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.259716 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx92x\" (UniqueName: \"kubernetes.io/projected/645a2937-2612-4a25-a28b-c77bf1453d7f-kube-api-access-jx92x\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.260485 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzm5g\" (UniqueName: \"kubernetes.io/projected/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-kube-api-access-wzm5g\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.260963 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc 
kubenswrapper[4835]: I0202 17:10:28.262145 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.264517 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.276515 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx92x\" (UniqueName: \"kubernetes.io/projected/645a2937-2612-4a25-a28b-c77bf1453d7f-kube-api-access-jx92x\") pod \"nova-cell1-novncproxy-0\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.363980 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-config\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364375 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-988vj\" (UniqueName: \"kubernetes.io/projected/413f9f5d-9076-437b-a5b6-0b7404b81446-kube-api-access-988vj\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364528 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364578 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-config-data\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364613 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzm5g\" (UniqueName: \"kubernetes.io/projected/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-kube-api-access-wzm5g\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364639 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364664 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364709 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-logs\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364748 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-config-data\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364822 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364847 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364870 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d6a1430-374c-40ad-bab8-dfd817bafe7d-logs\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.364899 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd9qb\" (UniqueName: \"kubernetes.io/projected/8d6a1430-374c-40ad-bab8-dfd817bafe7d-kube-api-access-pd9qb\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.365564 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-logs\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.368932 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.369568 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-config-data\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc 
kubenswrapper[4835]: I0202 17:10:28.391900 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzm5g\" (UniqueName: \"kubernetes.io/projected/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-kube-api-access-wzm5g\") pod \"nova-metadata-0\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466043 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-config-data\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466102 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466164 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466181 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466202 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d6a1430-374c-40ad-bab8-dfd817bafe7d-logs\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466219 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd9qb\" (UniqueName: \"kubernetes.io/projected/8d6a1430-374c-40ad-bab8-dfd817bafe7d-kube-api-access-pd9qb\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466254 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-config\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466304 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-988vj\" (UniqueName: \"kubernetes.io/projected/413f9f5d-9076-437b-a5b6-0b7404b81446-kube-api-access-988vj\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.466333 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-sb\") pod 
\"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.467124 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.467162 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.467285 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d6a1430-374c-40ad-bab8-dfd817bafe7d-logs\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.467690 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-config\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.467991 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.471892 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-config-data\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.471907 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.490695 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.494436 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd9qb\" (UniqueName: \"kubernetes.io/projected/8d6a1430-374c-40ad-bab8-dfd817bafe7d-kube-api-access-pd9qb\") pod \"nova-api-0\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.496414 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-988vj\" (UniqueName: \"kubernetes.io/projected/413f9f5d-9076-437b-a5b6-0b7404b81446-kube-api-access-988vj\") pod \"dnsmasq-dns-8b8cf6657-7krzf\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.502312 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.520790 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.553140 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.694891 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.709177 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-bftbv"] Feb 02 17:10:28 crc kubenswrapper[4835]: W0202 17:10:28.710526 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4389d06_4543_4757_9a9e_3df501e4b228.slice/crio-49d123f44c99f7014ea263da0d5ab1e6e49e19ba8b91c8288e4446f7bc2b33fa WatchSource:0}: Error finding container 49d123f44c99f7014ea263da0d5ab1e6e49e19ba8b91c8288e4446f7bc2b33fa: Status 404 returned error can't find the container with id 49d123f44c99f7014ea263da0d5ab1e6e49e19ba8b91c8288e4446f7bc2b33fa Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.870474 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wtzmt"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.872189 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.875847 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.880388 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wtzmt"] Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.880487 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.907437 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bftbv" event={"ID":"9faacb79-efaa-411d-9d65-23b6b602b4d2","Type":"ContainerStarted","Data":"8faecb04d5bcb34690fb645527e870cb534ff6a86a54218b4911dc76cdb3faa6"} Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.912631 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d4389d06-4543-4757-9a9e-3df501e4b228","Type":"ContainerStarted","Data":"49d123f44c99f7014ea263da0d5ab1e6e49e19ba8b91c8288e4446f7bc2b33fa"} Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.977776 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-scripts\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.977848 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49q7q\" (UniqueName: \"kubernetes.io/projected/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-kube-api-access-49q7q\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.977948 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:28 crc kubenswrapper[4835]: I0202 17:10:28.978014 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-config-data\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: W0202 17:10:29.037123 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod645a2937_2612_4a25_a28b_c77bf1453d7f.slice/crio-e91f2ded022489a2142543f66e48d303f72639d874f7935a1586528c91c99efd WatchSource:0}: Error finding container e91f2ded022489a2142543f66e48d303f72639d874f7935a1586528c91c99efd: Status 404 returned error can't find the container with id e91f2ded022489a2142543f66e48d303f72639d874f7935a1586528c91c99efd Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.045486 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.086451 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-scripts\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.087227 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49q7q\" (UniqueName: \"kubernetes.io/projected/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-kube-api-access-49q7q\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.087467 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.087644 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-config-data\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.093366 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-config-data\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.108026 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-scripts\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.108443 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.111497 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49q7q\" (UniqueName: \"kubernetes.io/projected/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-kube-api-access-49q7q\") pod \"nova-cell1-conductor-db-sync-wtzmt\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.171945 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:29 crc kubenswrapper[4835]: W0202 17:10:29.185458 4835 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ddee5cf_1f78_4957_8c94_ec89fda1bb29.slice/crio-24773528e2d25c557dcdd01edca006a70bc30e1531fbc23349f170ce22ceb2d5 WatchSource:0}: Error finding container 24773528e2d25c557dcdd01edca006a70bc30e1531fbc23349f170ce22ceb2d5: Status 404 returned error can't find the container with id 24773528e2d25c557dcdd01edca006a70bc30e1531fbc23349f170ce22ceb2d5 Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.207151 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.210634 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.331773 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-7krzf"] Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.712396 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wtzmt"] Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.930624 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d6a1430-374c-40ad-bab8-dfd817bafe7d","Type":"ContainerStarted","Data":"a75374852cff5678010af9d3126f6503b7eb5490f58c1248b3f855ee69ba1a91"} Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.947679 4835 generic.go:334] "Generic (PLEG): container finished" podID="413f9f5d-9076-437b-a5b6-0b7404b81446" containerID="4d14276e7078fb1ae5469cb0c2dc4f0ceb615e092b9a00e8c2df7341e52e38b9" exitCode=0 Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.947755 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" event={"ID":"413f9f5d-9076-437b-a5b6-0b7404b81446","Type":"ContainerDied","Data":"4d14276e7078fb1ae5469cb0c2dc4f0ceb615e092b9a00e8c2df7341e52e38b9"} Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.947780 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" event={"ID":"413f9f5d-9076-437b-a5b6-0b7404b81446","Type":"ContainerStarted","Data":"3f7fb67d2d02510842811aee61261f7c2d1f373b8ac4c465ca1683502771b84a"} Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.952025 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"645a2937-2612-4a25-a28b-c77bf1453d7f","Type":"ContainerStarted","Data":"e91f2ded022489a2142543f66e48d303f72639d874f7935a1586528c91c99efd"} Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.957420 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bftbv" event={"ID":"9faacb79-efaa-411d-9d65-23b6b602b4d2","Type":"ContainerStarted","Data":"42d0cb082a50cb0d6c429ebb9f06a478f7a888aef9d9c712877d6e7a72c670a7"} Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.958894 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ddee5cf-1f78-4957-8c94-ec89fda1bb29","Type":"ContainerStarted","Data":"24773528e2d25c557dcdd01edca006a70bc30e1531fbc23349f170ce22ceb2d5"} Feb 02 17:10:29 crc kubenswrapper[4835]: I0202 17:10:29.964875 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" event={"ID":"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6","Type":"ContainerStarted","Data":"26ccdf7aff802523915d721e14a4df99d69311b2668aaf93ce31bd6ee735cbe6"} Feb 02 17:10:30 crc kubenswrapper[4835]: 
I0202 17:10:30.005719 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" podStartSLOduration=2.005703801 podStartE2EDuration="2.005703801s" podCreationTimestamp="2026-02-02 17:10:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:10:30.001922454 +0000 UTC m=+1221.623526534" watchObservedRunningTime="2026-02-02 17:10:30.005703801 +0000 UTC m=+1221.627307881" Feb 02 17:10:30 crc kubenswrapper[4835]: I0202 17:10:30.027121 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-bftbv" podStartSLOduration=3.027099407 podStartE2EDuration="3.027099407s" podCreationTimestamp="2026-02-02 17:10:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:10:30.019820971 +0000 UTC m=+1221.641425071" watchObservedRunningTime="2026-02-02 17:10:30.027099407 +0000 UTC m=+1221.648703497" Feb 02 17:10:30 crc kubenswrapper[4835]: I0202 17:10:30.979641 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" event={"ID":"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6","Type":"ContainerStarted","Data":"575b04fb6bd5a6e55cee6452abc9e43a2d4bf78fbe63f1332736ac695d8477ed"} Feb 02 17:10:30 crc kubenswrapper[4835]: I0202 17:10:30.989465 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" event={"ID":"413f9f5d-9076-437b-a5b6-0b7404b81446","Type":"ContainerStarted","Data":"871df2af5aa304822677e07946e6b6fa70558c987572e12e9d45067660893a1c"} Feb 02 17:10:31 crc kubenswrapper[4835]: I0202 17:10:31.010825 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" podStartSLOduration=3.010805886 podStartE2EDuration="3.010805886s" podCreationTimestamp="2026-02-02 17:10:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:10:31.009408996 +0000 UTC m=+1222.631013076" watchObservedRunningTime="2026-02-02 17:10:31.010805886 +0000 UTC m=+1222.632409966" Feb 02 17:10:31 crc kubenswrapper[4835]: I0202 17:10:31.232312 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:31 crc kubenswrapper[4835]: I0202 17:10:31.244962 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:10:32 crc kubenswrapper[4835]: I0202 17:10:32.000714 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.035243 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d6a1430-374c-40ad-bab8-dfd817bafe7d","Type":"ContainerStarted","Data":"7e4b098a256c9db5fe63687cb6fa96cbd3064a47fc70b794ca57602a1589365c"} Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.035860 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d6a1430-374c-40ad-bab8-dfd817bafe7d","Type":"ContainerStarted","Data":"496a1fb2e2d1cf71e3fbfae681b92df9abb9db0338d991ff94293814400ebf2d"} Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.036752 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"645a2937-2612-4a25-a28b-c77bf1453d7f","Type":"ContainerStarted","Data":"cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be"} Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.036812 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="645a2937-2612-4a25-a28b-c77bf1453d7f" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be" gracePeriod=30 Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.039773 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ddee5cf-1f78-4957-8c94-ec89fda1bb29","Type":"ContainerStarted","Data":"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f"} Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.039801 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ddee5cf-1f78-4957-8c94-ec89fda1bb29","Type":"ContainerStarted","Data":"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4"} Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.039884 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerName="nova-metadata-log" containerID="cri-o://900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4" gracePeriod=30 Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.039985 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerName="nova-metadata-metadata" containerID="cri-o://8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f" gracePeriod=30 Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.050101 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d4389d06-4543-4757-9a9e-3df501e4b228","Type":"ContainerStarted","Data":"cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93"} Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.067159 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.481737822 podStartE2EDuration="8.06714202s" podCreationTimestamp="2026-02-02 17:10:27 +0000 UTC" firstStartedPulling="2026-02-02 17:10:29.18434072 +0000 UTC m=+1220.805944800" lastFinishedPulling="2026-02-02 17:10:33.769744878 +0000 UTC m=+1225.391348998" observedRunningTime="2026-02-02 17:10:35.060815511 +0000 UTC m=+1226.682419591" watchObservedRunningTime="2026-02-02 17:10:35.06714202 +0000 UTC m=+1226.688746100" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.080752 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.021276063 podStartE2EDuration="8.080730705s" podCreationTimestamp="2026-02-02 17:10:27 +0000 UTC" firstStartedPulling="2026-02-02 17:10:28.713456236 +0000 UTC m=+1220.335060326" lastFinishedPulling="2026-02-02 17:10:33.772910888 +0000 UTC m=+1225.394514968" observedRunningTime="2026-02-02 17:10:35.077634097 +0000 UTC m=+1226.699238187" watchObservedRunningTime="2026-02-02 17:10:35.080730705 +0000 UTC m=+1226.702334785" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.109333 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" 
podStartSLOduration=3.3755657550000002 podStartE2EDuration="8.109307394s" podCreationTimestamp="2026-02-02 17:10:27 +0000 UTC" firstStartedPulling="2026-02-02 17:10:29.040441545 +0000 UTC m=+1220.662045625" lastFinishedPulling="2026-02-02 17:10:33.774183184 +0000 UTC m=+1225.395787264" observedRunningTime="2026-02-02 17:10:35.095367689 +0000 UTC m=+1226.716971769" watchObservedRunningTime="2026-02-02 17:10:35.109307394 +0000 UTC m=+1226.730911484" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.113593 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.539053755 podStartE2EDuration="8.113572895s" podCreationTimestamp="2026-02-02 17:10:27 +0000 UTC" firstStartedPulling="2026-02-02 17:10:29.19636228 +0000 UTC m=+1220.817966360" lastFinishedPulling="2026-02-02 17:10:33.77088139 +0000 UTC m=+1225.392485500" observedRunningTime="2026-02-02 17:10:35.112011091 +0000 UTC m=+1226.733615171" watchObservedRunningTime="2026-02-02 17:10:35.113572895 +0000 UTC m=+1226.735176985" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.622613 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.747249 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-logs\") pod \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.747575 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-logs" (OuterVolumeSpecName: "logs") pod "2ddee5cf-1f78-4957-8c94-ec89fda1bb29" (UID: "2ddee5cf-1f78-4957-8c94-ec89fda1bb29"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.748146 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-combined-ca-bundle\") pod \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.748223 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-config-data\") pod \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.748258 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzm5g\" (UniqueName: \"kubernetes.io/projected/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-kube-api-access-wzm5g\") pod \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\" (UID: \"2ddee5cf-1f78-4957-8c94-ec89fda1bb29\") " Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.748835 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.752795 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-kube-api-access-wzm5g" (OuterVolumeSpecName: "kube-api-access-wzm5g") pod "2ddee5cf-1f78-4957-8c94-ec89fda1bb29" (UID: "2ddee5cf-1f78-4957-8c94-ec89fda1bb29"). InnerVolumeSpecName "kube-api-access-wzm5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.781785 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-config-data" (OuterVolumeSpecName: "config-data") pod "2ddee5cf-1f78-4957-8c94-ec89fda1bb29" (UID: "2ddee5cf-1f78-4957-8c94-ec89fda1bb29"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.800583 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ddee5cf-1f78-4957-8c94-ec89fda1bb29" (UID: "2ddee5cf-1f78-4957-8c94-ec89fda1bb29"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.850737 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.850779 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:35 crc kubenswrapper[4835]: I0202 17:10:35.850795 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzm5g\" (UniqueName: \"kubernetes.io/projected/2ddee5cf-1f78-4957-8c94-ec89fda1bb29-kube-api-access-wzm5g\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.062521 4835 generic.go:334] "Generic (PLEG): container finished" podID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerID="8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f" exitCode=0 Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.063027 4835 generic.go:334] "Generic (PLEG): container finished" podID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerID="900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4" exitCode=143 Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.062617 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.062634 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ddee5cf-1f78-4957-8c94-ec89fda1bb29","Type":"ContainerDied","Data":"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f"} Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.063186 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ddee5cf-1f78-4957-8c94-ec89fda1bb29","Type":"ContainerDied","Data":"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4"} Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.063208 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ddee5cf-1f78-4957-8c94-ec89fda1bb29","Type":"ContainerDied","Data":"24773528e2d25c557dcdd01edca006a70bc30e1531fbc23349f170ce22ceb2d5"} Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.063235 4835 scope.go:117] "RemoveContainer" containerID="8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.104153 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.109542 4835 scope.go:117] "RemoveContainer" containerID="900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.115055 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.135082 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:36 crc kubenswrapper[4835]: E0202 17:10:36.135726 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerName="nova-metadata-log" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.135769 4835 
state_mem.go:107] "Deleted CPUSet assignment" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerName="nova-metadata-log" Feb 02 17:10:36 crc kubenswrapper[4835]: E0202 17:10:36.135808 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerName="nova-metadata-metadata" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.135820 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerName="nova-metadata-metadata" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.136210 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerName="nova-metadata-metadata" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.136261 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" containerName="nova-metadata-log" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.138597 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.141428 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.143347 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.146652 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.154755 4835 scope.go:117] "RemoveContainer" containerID="8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.157486 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r6rt\" (UniqueName: \"kubernetes.io/projected/09de5e87-0387-46b2-9eb5-67dd9fa92127-kube-api-access-8r6rt\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.157889 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-config-data\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.157918 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.157996 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09de5e87-0387-46b2-9eb5-67dd9fa92127-logs\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.158046 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: E0202 17:10:36.159324 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f\": container with ID starting with 8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f not found: ID does not exist" containerID="8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.159359 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f"} err="failed to get container status \"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f\": rpc error: code = NotFound desc = could not find container \"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f\": container with ID starting with 8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f not found: ID does not exist" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.159390 4835 scope.go:117] "RemoveContainer" containerID="900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4" Feb 02 17:10:36 crc kubenswrapper[4835]: E0202 17:10:36.159956 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4\": container with ID starting with 900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4 not found: ID does not exist" containerID="900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.160013 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4"} err="failed to get container status \"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4\": rpc error: code = NotFound desc = could not find container \"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4\": container with ID starting with 900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4 not found: ID does not exist" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.160028 4835 scope.go:117] "RemoveContainer" containerID="8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.160567 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f"} err="failed to get container status \"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f\": rpc error: code = NotFound desc = could not find container \"8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f\": container with ID starting with 8f11c5dd9c0a834ccfcbbae450df370e70d5a7e214d8c44c8ffbfb816bcfad4f not found: ID does not exist" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.160606 4835 scope.go:117] "RemoveContainer" containerID="900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.160959 4835 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4"} err="failed to get container status \"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4\": rpc error: code = NotFound desc = could not find container \"900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4\": container with ID starting with 900a1c1253c2ad7316dd210a7b33ce49a424e781c13d6aeecfeb2a3a1fcad3c4 not found: ID does not exist" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.260045 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r6rt\" (UniqueName: \"kubernetes.io/projected/09de5e87-0387-46b2-9eb5-67dd9fa92127-kube-api-access-8r6rt\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.260109 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-config-data\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.260138 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.260244 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09de5e87-0387-46b2-9eb5-67dd9fa92127-logs\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.260305 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.260998 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09de5e87-0387-46b2-9eb5-67dd9fa92127-logs\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.264833 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.265120 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-config-data\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.278598 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.280765 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r6rt\" (UniqueName: \"kubernetes.io/projected/09de5e87-0387-46b2-9eb5-67dd9fa92127-kube-api-access-8r6rt\") pod \"nova-metadata-0\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.497409 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:36 crc kubenswrapper[4835]: I0202 17:10:36.949430 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:36 crc kubenswrapper[4835]: W0202 17:10:36.953221 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09de5e87_0387_46b2_9eb5_67dd9fa92127.slice/crio-d29dcae6d1aeb6415c9861ad2791df3c78b62efe9f9d0bcecb41fdd9f4d2d3c6 WatchSource:0}: Error finding container d29dcae6d1aeb6415c9861ad2791df3c78b62efe9f9d0bcecb41fdd9f4d2d3c6: Status 404 returned error can't find the container with id d29dcae6d1aeb6415c9861ad2791df3c78b62efe9f9d0bcecb41fdd9f4d2d3c6 Feb 02 17:10:37 crc kubenswrapper[4835]: I0202 17:10:37.072606 4835 generic.go:334] "Generic (PLEG): container finished" podID="9faacb79-efaa-411d-9d65-23b6b602b4d2" containerID="42d0cb082a50cb0d6c429ebb9f06a478f7a888aef9d9c712877d6e7a72c670a7" exitCode=0 Feb 02 17:10:37 crc kubenswrapper[4835]: I0202 17:10:37.072699 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bftbv" event={"ID":"9faacb79-efaa-411d-9d65-23b6b602b4d2","Type":"ContainerDied","Data":"42d0cb082a50cb0d6c429ebb9f06a478f7a888aef9d9c712877d6e7a72c670a7"} Feb 02 17:10:37 crc kubenswrapper[4835]: I0202 17:10:37.076138 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"09de5e87-0387-46b2-9eb5-67dd9fa92127","Type":"ContainerStarted","Data":"d29dcae6d1aeb6415c9861ad2791df3c78b62efe9f9d0bcecb41fdd9f4d2d3c6"} Feb 02 17:10:37 crc kubenswrapper[4835]: I0202 17:10:37.200284 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ddee5cf-1f78-4957-8c94-ec89fda1bb29" path="/var/lib/kubelet/pods/2ddee5cf-1f78-4957-8c94-ec89fda1bb29/volumes" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.090903 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"09de5e87-0387-46b2-9eb5-67dd9fa92127","Type":"ContainerStarted","Data":"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b"} Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.091517 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"09de5e87-0387-46b2-9eb5-67dd9fa92127","Type":"ContainerStarted","Data":"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627"} Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.136341 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.136319429 podStartE2EDuration="2.136319429s" podCreationTimestamp="2026-02-02 17:10:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-02 17:10:38.128843557 +0000 UTC m=+1229.750447637" watchObservedRunningTime="2026-02-02 17:10:38.136319429 +0000 UTC m=+1229.757923509" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.156598 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.157237 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.184035 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.468867 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.491753 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.523169 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.523255 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.555708 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.610142 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-config-data\") pod \"9faacb79-efaa-411d-9d65-23b6b602b4d2\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.610314 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-scripts\") pod \"9faacb79-efaa-411d-9d65-23b6b602b4d2\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.610430 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xstx\" (UniqueName: \"kubernetes.io/projected/9faacb79-efaa-411d-9d65-23b6b602b4d2-kube-api-access-6xstx\") pod \"9faacb79-efaa-411d-9d65-23b6b602b4d2\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.610463 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-combined-ca-bundle\") pod \"9faacb79-efaa-411d-9d65-23b6b602b4d2\" (UID: \"9faacb79-efaa-411d-9d65-23b6b602b4d2\") " Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.621728 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9faacb79-efaa-411d-9d65-23b6b602b4d2-kube-api-access-6xstx" (OuterVolumeSpecName: "kube-api-access-6xstx") pod "9faacb79-efaa-411d-9d65-23b6b602b4d2" (UID: "9faacb79-efaa-411d-9d65-23b6b602b4d2"). InnerVolumeSpecName "kube-api-access-6xstx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.625943 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-wgs86"] Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.626249 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" podUID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" containerName="dnsmasq-dns" containerID="cri-o://40722646456bae29e7e4f33bfdda31d100f0995b32dd23015176fb24a5c5b2ab" gracePeriod=10 Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.628393 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-scripts" (OuterVolumeSpecName: "scripts") pod "9faacb79-efaa-411d-9d65-23b6b602b4d2" (UID: "9faacb79-efaa-411d-9d65-23b6b602b4d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.676808 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-config-data" (OuterVolumeSpecName: "config-data") pod "9faacb79-efaa-411d-9d65-23b6b602b4d2" (UID: "9faacb79-efaa-411d-9d65-23b6b602b4d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.700715 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9faacb79-efaa-411d-9d65-23b6b602b4d2" (UID: "9faacb79-efaa-411d-9d65-23b6b602b4d2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.715396 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xstx\" (UniqueName: \"kubernetes.io/projected/9faacb79-efaa-411d-9d65-23b6b602b4d2-kube-api-access-6xstx\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.715458 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.715473 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:38 crc kubenswrapper[4835]: I0202 17:10:38.715487 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9faacb79-efaa-411d-9d65-23b6b602b4d2-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.103687 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bftbv" event={"ID":"9faacb79-efaa-411d-9d65-23b6b602b4d2","Type":"ContainerDied","Data":"8faecb04d5bcb34690fb645527e870cb534ff6a86a54218b4911dc76cdb3faa6"} Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.103726 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8faecb04d5bcb34690fb645527e870cb534ff6a86a54218b4911dc76cdb3faa6" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.103729 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bftbv" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.109740 4835 generic.go:334] "Generic (PLEG): container finished" podID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" containerID="40722646456bae29e7e4f33bfdda31d100f0995b32dd23015176fb24a5c5b2ab" exitCode=0 Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.109846 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" event={"ID":"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6","Type":"ContainerDied","Data":"40722646456bae29e7e4f33bfdda31d100f0995b32dd23015176fb24a5c5b2ab"} Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.109872 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" event={"ID":"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6","Type":"ContainerDied","Data":"93721595a6287fced36c97190847cf30e0a088eeb04140632f9be211dae7b155"} Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.109883 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93721595a6287fced36c97190847cf30e0a088eeb04140632f9be211dae7b155" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.115722 4835 generic.go:334] "Generic (PLEG): container finished" podID="97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" containerID="575b04fb6bd5a6e55cee6452abc9e43a2d4bf78fbe63f1332736ac695d8477ed" exitCode=0 Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.116512 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" event={"ID":"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6","Type":"ContainerDied","Data":"575b04fb6bd5a6e55cee6452abc9e43a2d4bf78fbe63f1332736ac695d8477ed"} Feb 02 17:10:39 crc kubenswrapper[4835]: 
I0202 17:10:39.162399 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.218796 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.360855 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxsmw\" (UniqueName: \"kubernetes.io/projected/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-kube-api-access-xxsmw\") pod \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.360911 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-nb\") pod \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.360948 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-dns-svc\") pod \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.360970 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-config\") pod \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.361061 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-sb\") pod \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\" (UID: \"a8f2f577-d44e-4442-96b1-cc95f4f8d3d6\") " Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.370783 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.371794 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-api" containerID="cri-o://7e4b098a256c9db5fe63687cb6fa96cbd3064a47fc70b794ca57602a1589365c" gracePeriod=30 Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.371368 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-log" containerID="cri-o://496a1fb2e2d1cf71e3fbfae681b92df9abb9db0338d991ff94293814400ebf2d" gracePeriod=30 Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.390863 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.178:8774/\": EOF" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.391438 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-kube-api-access-xxsmw" (OuterVolumeSpecName: "kube-api-access-xxsmw") pod "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" (UID: "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6"). 
InnerVolumeSpecName "kube-api-access-xxsmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.391692 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.178:8774/\": EOF" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.424150 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.464849 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" (UID: "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.464902 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxsmw\" (UniqueName: \"kubernetes.io/projected/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-kube-api-access-xxsmw\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.473754 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-config" (OuterVolumeSpecName: "config") pod "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" (UID: "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.491047 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" (UID: "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.500727 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" (UID: "a8f2f577-d44e-4442-96b1-cc95f4f8d3d6"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.567183 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.567222 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.567232 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.567240 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:39 crc kubenswrapper[4835]: I0202 17:10:39.657561 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.128781 4835 generic.go:334] "Generic (PLEG): container finished" podID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerID="496a1fb2e2d1cf71e3fbfae681b92df9abb9db0338d991ff94293814400ebf2d" exitCode=143 Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.128991 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d6a1430-374c-40ad-bab8-dfd817bafe7d","Type":"ContainerDied","Data":"496a1fb2e2d1cf71e3fbfae681b92df9abb9db0338d991ff94293814400ebf2d"} Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.129045 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-wgs86" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.131645 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerName="nova-metadata-log" containerID="cri-o://42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627" gracePeriod=30 Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.132055 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerName="nova-metadata-metadata" containerID="cri-o://7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b" gracePeriod=30 Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.186576 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-wgs86"] Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.198082 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-wgs86"] Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.572519 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.679142 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.695141 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-scripts\") pod \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.695212 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-combined-ca-bundle\") pod \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.695249 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-config-data\") pod \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.695508 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49q7q\" (UniqueName: \"kubernetes.io/projected/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-kube-api-access-49q7q\") pod \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\" (UID: \"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.711601 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-kube-api-access-49q7q" (OuterVolumeSpecName: "kube-api-access-49q7q") pod "97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" (UID: "97d1d3dc-e7a1-4a15-b586-a285bcc6cff6"). InnerVolumeSpecName "kube-api-access-49q7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.711736 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-scripts" (OuterVolumeSpecName: "scripts") pod "97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" (UID: "97d1d3dc-e7a1-4a15-b586-a285bcc6cff6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.736346 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" (UID: "97d1d3dc-e7a1-4a15-b586-a285bcc6cff6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.740843 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-config-data" (OuterVolumeSpecName: "config-data") pod "97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" (UID: "97d1d3dc-e7a1-4a15-b586-a285bcc6cff6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.797224 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r6rt\" (UniqueName: \"kubernetes.io/projected/09de5e87-0387-46b2-9eb5-67dd9fa92127-kube-api-access-8r6rt\") pod \"09de5e87-0387-46b2-9eb5-67dd9fa92127\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.797338 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-combined-ca-bundle\") pod \"09de5e87-0387-46b2-9eb5-67dd9fa92127\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.797434 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09de5e87-0387-46b2-9eb5-67dd9fa92127-logs\") pod \"09de5e87-0387-46b2-9eb5-67dd9fa92127\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.797475 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-config-data\") pod \"09de5e87-0387-46b2-9eb5-67dd9fa92127\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.797548 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-nova-metadata-tls-certs\") pod \"09de5e87-0387-46b2-9eb5-67dd9fa92127\" (UID: \"09de5e87-0387-46b2-9eb5-67dd9fa92127\") " Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.798014 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.798041 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.798054 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49q7q\" (UniqueName: \"kubernetes.io/projected/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-kube-api-access-49q7q\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.798068 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.798143 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09de5e87-0387-46b2-9eb5-67dd9fa92127-logs" (OuterVolumeSpecName: "logs") pod "09de5e87-0387-46b2-9eb5-67dd9fa92127" (UID: "09de5e87-0387-46b2-9eb5-67dd9fa92127"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.801465 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09de5e87-0387-46b2-9eb5-67dd9fa92127-kube-api-access-8r6rt" (OuterVolumeSpecName: "kube-api-access-8r6rt") pod "09de5e87-0387-46b2-9eb5-67dd9fa92127" (UID: "09de5e87-0387-46b2-9eb5-67dd9fa92127"). InnerVolumeSpecName "kube-api-access-8r6rt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.827703 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09de5e87-0387-46b2-9eb5-67dd9fa92127" (UID: "09de5e87-0387-46b2-9eb5-67dd9fa92127"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.831143 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-config-data" (OuterVolumeSpecName: "config-data") pod "09de5e87-0387-46b2-9eb5-67dd9fa92127" (UID: "09de5e87-0387-46b2-9eb5-67dd9fa92127"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.850951 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "09de5e87-0387-46b2-9eb5-67dd9fa92127" (UID: "09de5e87-0387-46b2-9eb5-67dd9fa92127"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.899429 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r6rt\" (UniqueName: \"kubernetes.io/projected/09de5e87-0387-46b2-9eb5-67dd9fa92127-kube-api-access-8r6rt\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.899991 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.900105 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09de5e87-0387-46b2-9eb5-67dd9fa92127-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.900192 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:40 crc kubenswrapper[4835]: I0202 17:10:40.900265 4835 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/09de5e87-0387-46b2-9eb5-67dd9fa92127-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.138479 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" event={"ID":"97d1d3dc-e7a1-4a15-b586-a285bcc6cff6","Type":"ContainerDied","Data":"26ccdf7aff802523915d721e14a4df99d69311b2668aaf93ce31bd6ee735cbe6"} Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.138503 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-wtzmt" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.138593 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26ccdf7aff802523915d721e14a4df99d69311b2668aaf93ce31bd6ee735cbe6" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.141874 4835 generic.go:334] "Generic (PLEG): container finished" podID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerID="7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b" exitCode=0 Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.141901 4835 generic.go:334] "Generic (PLEG): container finished" podID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerID="42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627" exitCode=143 Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.141928 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.141968 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"09de5e87-0387-46b2-9eb5-67dd9fa92127","Type":"ContainerDied","Data":"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b"} Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.141993 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"09de5e87-0387-46b2-9eb5-67dd9fa92127","Type":"ContainerDied","Data":"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627"} Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.142003 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"09de5e87-0387-46b2-9eb5-67dd9fa92127","Type":"ContainerDied","Data":"d29dcae6d1aeb6415c9861ad2791df3c78b62efe9f9d0bcecb41fdd9f4d2d3c6"} Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.142017 4835 scope.go:117] "RemoveContainer" containerID="7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.142737 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d4389d06-4543-4757-9a9e-3df501e4b228" containerName="nova-scheduler-scheduler" containerID="cri-o://cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93" gracePeriod=30 Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.169009 4835 scope.go:117] "RemoveContainer" containerID="42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.181985 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.200133 4835 scope.go:117] "RemoveContainer" containerID="7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b" Feb 02 17:10:41 crc kubenswrapper[4835]: E0202 17:10:41.200651 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b\": container with ID starting with 7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b not found: ID does not exist" containerID="7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.200759 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b"} err="failed to get container status \"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b\": rpc error: code = NotFound desc = could not find container \"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b\": container with ID starting with 7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b not found: ID does not exist" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.200847 4835 scope.go:117] "RemoveContainer" containerID="42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627" Feb 02 17:10:41 crc kubenswrapper[4835]: E0202 17:10:41.201919 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627\": container with ID starting with 
42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627 not found: ID does not exist" containerID="42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.201974 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627"} err="failed to get container status \"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627\": rpc error: code = NotFound desc = could not find container \"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627\": container with ID starting with 42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627 not found: ID does not exist" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.202013 4835 scope.go:117] "RemoveContainer" containerID="7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.202433 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b"} err="failed to get container status \"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b\": rpc error: code = NotFound desc = could not find container \"7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b\": container with ID starting with 7a39cbd56ea590325d687c8d38482d90e4f29d1306b4bf9c8fe11b98af56877b not found: ID does not exist" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.202522 4835 scope.go:117] "RemoveContainer" containerID="42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.202775 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627"} err="failed to get container status \"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627\": rpc error: code = NotFound desc = could not find container \"42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627\": container with ID starting with 42badcdf4c3a91e858e94ef2ecd8f55275963c45fa37d352668094c9ed3d7627 not found: ID does not exist" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.205298 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" path="/var/lib/kubelet/pods/a8f2f577-d44e-4442-96b1-cc95f4f8d3d6/volumes" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.206031 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.206121 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.247434 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:41 crc kubenswrapper[4835]: E0202 17:10:41.247974 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" containerName="nova-cell1-conductor-db-sync" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.247998 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" containerName="nova-cell1-conductor-db-sync" Feb 02 17:10:41 crc kubenswrapper[4835]: E0202 17:10:41.248011 4835 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" containerName="dnsmasq-dns" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248021 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" containerName="dnsmasq-dns" Feb 02 17:10:41 crc kubenswrapper[4835]: E0202 17:10:41.248034 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" containerName="init" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248079 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" containerName="init" Feb 02 17:10:41 crc kubenswrapper[4835]: E0202 17:10:41.248096 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerName="nova-metadata-metadata" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248103 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerName="nova-metadata-metadata" Feb 02 17:10:41 crc kubenswrapper[4835]: E0202 17:10:41.248114 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerName="nova-metadata-log" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248121 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerName="nova-metadata-log" Feb 02 17:10:41 crc kubenswrapper[4835]: E0202 17:10:41.248145 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9faacb79-efaa-411d-9d65-23b6b602b4d2" containerName="nova-manage" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248152 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="9faacb79-efaa-411d-9d65-23b6b602b4d2" containerName="nova-manage" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248435 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8f2f577-d44e-4442-96b1-cc95f4f8d3d6" containerName="dnsmasq-dns" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248451 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" containerName="nova-cell1-conductor-db-sync" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248463 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerName="nova-metadata-metadata" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248472 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" containerName="nova-metadata-log" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.248482 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="9faacb79-efaa-411d-9d65-23b6b602b4d2" containerName="nova-manage" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.249388 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.253627 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.254005 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.263596 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.295344 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.296557 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.302355 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.305336 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.409606 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.409826 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7572ca57-a7e7-4025-8688-de2e52ece174-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.409872 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-config-data\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.409979 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4vgx\" (UniqueName: \"kubernetes.io/projected/7572ca57-a7e7-4025-8688-de2e52ece174-kube-api-access-b4vgx\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.410112 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.410226 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh6jp\" (UniqueName: \"kubernetes.io/projected/2530b65e-b20c-47ce-b898-7d272a7080eb-kube-api-access-wh6jp\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " 
pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.410333 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7572ca57-a7e7-4025-8688-de2e52ece174-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.410367 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2530b65e-b20c-47ce-b898-7d272a7080eb-logs\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.511762 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4vgx\" (UniqueName: \"kubernetes.io/projected/7572ca57-a7e7-4025-8688-de2e52ece174-kube-api-access-b4vgx\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.511881 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.511931 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh6jp\" (UniqueName: \"kubernetes.io/projected/2530b65e-b20c-47ce-b898-7d272a7080eb-kube-api-access-wh6jp\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.511972 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7572ca57-a7e7-4025-8688-de2e52ece174-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.512014 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2530b65e-b20c-47ce-b898-7d272a7080eb-logs\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.512044 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.512090 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7572ca57-a7e7-4025-8688-de2e52ece174-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.512112 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-config-data\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.512687 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2530b65e-b20c-47ce-b898-7d272a7080eb-logs\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.520137 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7572ca57-a7e7-4025-8688-de2e52ece174-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.520637 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.529447 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-config-data\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.529895 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7572ca57-a7e7-4025-8688-de2e52ece174-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.531432 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.532405 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4vgx\" (UniqueName: \"kubernetes.io/projected/7572ca57-a7e7-4025-8688-de2e52ece174-kube-api-access-b4vgx\") pod \"nova-cell1-conductor-0\" (UID: \"7572ca57-a7e7-4025-8688-de2e52ece174\") " pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.533802 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh6jp\" (UniqueName: \"kubernetes.io/projected/2530b65e-b20c-47ce-b898-7d272a7080eb-kube-api-access-wh6jp\") pod \"nova-metadata-0\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.572722 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:10:41 crc kubenswrapper[4835]: I0202 17:10:41.624570 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:42 crc kubenswrapper[4835]: W0202 17:10:42.081610 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2530b65e_b20c_47ce_b898_7d272a7080eb.slice/crio-5d6e3c3990a3acea7ce042dcd462efc37e86cebcc974d8416c6152efafc5a557 WatchSource:0}: Error finding container 5d6e3c3990a3acea7ce042dcd462efc37e86cebcc974d8416c6152efafc5a557: Status 404 returned error can't find the container with id 5d6e3c3990a3acea7ce042dcd462efc37e86cebcc974d8416c6152efafc5a557 Feb 02 17:10:42 crc kubenswrapper[4835]: I0202 17:10:42.082471 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:10:42 crc kubenswrapper[4835]: I0202 17:10:42.161335 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2530b65e-b20c-47ce-b898-7d272a7080eb","Type":"ContainerStarted","Data":"5d6e3c3990a3acea7ce042dcd462efc37e86cebcc974d8416c6152efafc5a557"} Feb 02 17:10:42 crc kubenswrapper[4835]: I0202 17:10:42.169536 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 17:10:42 crc kubenswrapper[4835]: W0202 17:10:42.172807 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7572ca57_a7e7_4025_8688_de2e52ece174.slice/crio-d864335e88262176733fed2e39357b02e7b52437c6605da601babbc0dbc9f59f WatchSource:0}: Error finding container d864335e88262176733fed2e39357b02e7b52437c6605da601babbc0dbc9f59f: Status 404 returned error can't find the container with id d864335e88262176733fed2e39357b02e7b52437c6605da601babbc0dbc9f59f Feb 02 17:10:43 crc kubenswrapper[4835]: E0202 17:10:43.159438 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 17:10:43 crc kubenswrapper[4835]: E0202 17:10:43.161893 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 17:10:43 crc kubenswrapper[4835]: E0202 17:10:43.166388 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 17:10:43 crc kubenswrapper[4835]: E0202 17:10:43.166452 4835 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d4389d06-4543-4757-9a9e-3df501e4b228" containerName="nova-scheduler-scheduler" Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.175675 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"2530b65e-b20c-47ce-b898-7d272a7080eb","Type":"ContainerStarted","Data":"3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5"} Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.175956 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2530b65e-b20c-47ce-b898-7d272a7080eb","Type":"ContainerStarted","Data":"65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d"} Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.178591 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7572ca57-a7e7-4025-8688-de2e52ece174","Type":"ContainerStarted","Data":"46e3697cd2e746b680a1c03b1a580c9f0eef84738998979467d6c38515130680"} Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.178714 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7572ca57-a7e7-4025-8688-de2e52ece174","Type":"ContainerStarted","Data":"d864335e88262176733fed2e39357b02e7b52437c6605da601babbc0dbc9f59f"} Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.178790 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.200434 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09de5e87-0387-46b2-9eb5-67dd9fa92127" path="/var/lib/kubelet/pods/09de5e87-0387-46b2-9eb5-67dd9fa92127/volumes" Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.206249 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.206221547 podStartE2EDuration="2.206221547s" podCreationTimestamp="2026-02-02 17:10:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:10:43.192829038 +0000 UTC m=+1234.814433118" watchObservedRunningTime="2026-02-02 17:10:43.206221547 +0000 UTC m=+1234.827825617" Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.218825 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.218806183 podStartE2EDuration="2.218806183s" podCreationTimestamp="2026-02-02 17:10:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:10:43.212015371 +0000 UTC m=+1234.833619451" watchObservedRunningTime="2026-02-02 17:10:43.218806183 +0000 UTC m=+1234.840410263" Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.663266 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:10:43 crc kubenswrapper[4835]: I0202 17:10:43.663878 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f808d0ef-5504-4d6c-9551-28b94cb89838" containerName="kube-state-metrics" containerID="cri-o://6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac" gracePeriod=30 Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.117782 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.193121 4835 generic.go:334] "Generic (PLEG): container finished" podID="f808d0ef-5504-4d6c-9551-28b94cb89838" containerID="6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac" exitCode=2 Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.193180 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.193203 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f808d0ef-5504-4d6c-9551-28b94cb89838","Type":"ContainerDied","Data":"6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac"} Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.193327 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f808d0ef-5504-4d6c-9551-28b94cb89838","Type":"ContainerDied","Data":"c49868b3586ba6377537a195ec15267de926440fd412e3d532ffdadd48ffc290"} Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.193354 4835 scope.go:117] "RemoveContainer" containerID="6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.215308 4835 scope.go:117] "RemoveContainer" containerID="6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac" Feb 02 17:10:44 crc kubenswrapper[4835]: E0202 17:10:44.215715 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac\": container with ID starting with 6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac not found: ID does not exist" containerID="6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.215739 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac"} err="failed to get container status \"6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac\": rpc error: code = NotFound desc = could not find container \"6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac\": container with ID starting with 6e61f4c9a1115c67ca55cdab402c96589da5cd99658bd707179803df689f2fac not found: ID does not exist" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.265036 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4swpg\" (UniqueName: \"kubernetes.io/projected/f808d0ef-5504-4d6c-9551-28b94cb89838-kube-api-access-4swpg\") pod \"f808d0ef-5504-4d6c-9551-28b94cb89838\" (UID: \"f808d0ef-5504-4d6c-9551-28b94cb89838\") " Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.272743 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f808d0ef-5504-4d6c-9551-28b94cb89838-kube-api-access-4swpg" (OuterVolumeSpecName: "kube-api-access-4swpg") pod "f808d0ef-5504-4d6c-9551-28b94cb89838" (UID: "f808d0ef-5504-4d6c-9551-28b94cb89838"). InnerVolumeSpecName "kube-api-access-4swpg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.367387 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4swpg\" (UniqueName: \"kubernetes.io/projected/f808d0ef-5504-4d6c-9551-28b94cb89838-kube-api-access-4swpg\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.523712 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.532639 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.542081 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:10:44 crc kubenswrapper[4835]: E0202 17:10:44.542466 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f808d0ef-5504-4d6c-9551-28b94cb89838" containerName="kube-state-metrics" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.542483 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f808d0ef-5504-4d6c-9551-28b94cb89838" containerName="kube-state-metrics" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.542654 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f808d0ef-5504-4d6c-9551-28b94cb89838" containerName="kube-state-metrics" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.543345 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.546828 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.547221 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.554555 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.671983 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.672041 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x8pf\" (UniqueName: \"kubernetes.io/projected/dc0384ad-df86-4939-8c71-92aff217a691-kube-api-access-7x8pf\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.672209 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.672426 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.778022 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.778075 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x8pf\" (UniqueName: \"kubernetes.io/projected/dc0384ad-df86-4939-8c71-92aff217a691-kube-api-access-7x8pf\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.778158 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.778290 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.784195 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.784860 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.785053 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dc0384ad-df86-4939-8c71-92aff217a691-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.786903 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.787171 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="ceilometer-central-agent" containerID="cri-o://3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45" gracePeriod=30 Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.787390 4835 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/ceilometer-0" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="sg-core" containerID="cri-o://5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02" gracePeriod=30 Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.787544 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="proxy-httpd" containerID="cri-o://c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66" gracePeriod=30 Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.787605 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="ceilometer-notification-agent" containerID="cri-o://f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27" gracePeriod=30 Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.806167 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x8pf\" (UniqueName: \"kubernetes.io/projected/dc0384ad-df86-4939-8c71-92aff217a691-kube-api-access-7x8pf\") pod \"kube-state-metrics-0\" (UID: \"dc0384ad-df86-4939-8c71-92aff217a691\") " pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.867232 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.870152 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:10:44 crc kubenswrapper[4835]: I0202 17:10:44.870188 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.211153 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f808d0ef-5504-4d6c-9551-28b94cb89838" path="/var/lib/kubelet/pods/f808d0ef-5504-4d6c-9551-28b94cb89838/volumes" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.223366 4835 generic.go:334] "Generic (PLEG): container finished" podID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerID="7e4b098a256c9db5fe63687cb6fa96cbd3064a47fc70b794ca57602a1589365c" exitCode=0 Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.223412 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d6a1430-374c-40ad-bab8-dfd817bafe7d","Type":"ContainerDied","Data":"7e4b098a256c9db5fe63687cb6fa96cbd3064a47fc70b794ca57602a1589365c"} Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.226652 4835 generic.go:334] "Generic (PLEG): container finished" podID="123189a5-58c2-406b-877a-e39bba484f73" containerID="c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66" exitCode=0 Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.226683 4835 generic.go:334] "Generic (PLEG): container finished" podID="123189a5-58c2-406b-877a-e39bba484f73" containerID="5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02" exitCode=2 Feb 02 17:10:45 crc 
kubenswrapper[4835]: I0202 17:10:45.226700 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerDied","Data":"c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66"} Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.226720 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerDied","Data":"5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02"} Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.353137 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.435133 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.498517 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-config-data\") pod \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.498650 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-combined-ca-bundle\") pod \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.498776 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d6a1430-374c-40ad-bab8-dfd817bafe7d-logs\") pod \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.498914 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd9qb\" (UniqueName: \"kubernetes.io/projected/8d6a1430-374c-40ad-bab8-dfd817bafe7d-kube-api-access-pd9qb\") pod \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\" (UID: \"8d6a1430-374c-40ad-bab8-dfd817bafe7d\") " Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.499242 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d6a1430-374c-40ad-bab8-dfd817bafe7d-logs" (OuterVolumeSpecName: "logs") pod "8d6a1430-374c-40ad-bab8-dfd817bafe7d" (UID: "8d6a1430-374c-40ad-bab8-dfd817bafe7d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.499481 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d6a1430-374c-40ad-bab8-dfd817bafe7d-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.522284 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d6a1430-374c-40ad-bab8-dfd817bafe7d-kube-api-access-pd9qb" (OuterVolumeSpecName: "kube-api-access-pd9qb") pod "8d6a1430-374c-40ad-bab8-dfd817bafe7d" (UID: "8d6a1430-374c-40ad-bab8-dfd817bafe7d"). InnerVolumeSpecName "kube-api-access-pd9qb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.532813 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d6a1430-374c-40ad-bab8-dfd817bafe7d" (UID: "8d6a1430-374c-40ad-bab8-dfd817bafe7d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.533408 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-config-data" (OuterVolumeSpecName: "config-data") pod "8d6a1430-374c-40ad-bab8-dfd817bafe7d" (UID: "8d6a1430-374c-40ad-bab8-dfd817bafe7d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.601264 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd9qb\" (UniqueName: \"kubernetes.io/projected/8d6a1430-374c-40ad-bab8-dfd817bafe7d-kube-api-access-pd9qb\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.601322 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.601333 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d6a1430-374c-40ad-bab8-dfd817bafe7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:45 crc kubenswrapper[4835]: I0202 17:10:45.860286 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.007302 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-combined-ca-bundle\") pod \"d4389d06-4543-4757-9a9e-3df501e4b228\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.007663 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-config-data\") pod \"d4389d06-4543-4757-9a9e-3df501e4b228\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.007771 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkdbg\" (UniqueName: \"kubernetes.io/projected/d4389d06-4543-4757-9a9e-3df501e4b228-kube-api-access-dkdbg\") pod \"d4389d06-4543-4757-9a9e-3df501e4b228\" (UID: \"d4389d06-4543-4757-9a9e-3df501e4b228\") " Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.015996 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4389d06-4543-4757-9a9e-3df501e4b228-kube-api-access-dkdbg" (OuterVolumeSpecName: "kube-api-access-dkdbg") pod "d4389d06-4543-4757-9a9e-3df501e4b228" (UID: "d4389d06-4543-4757-9a9e-3df501e4b228"). InnerVolumeSpecName "kube-api-access-dkdbg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.041528 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-config-data" (OuterVolumeSpecName: "config-data") pod "d4389d06-4543-4757-9a9e-3df501e4b228" (UID: "d4389d06-4543-4757-9a9e-3df501e4b228"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.045547 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4389d06-4543-4757-9a9e-3df501e4b228" (UID: "d4389d06-4543-4757-9a9e-3df501e4b228"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.109598 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.109645 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4389d06-4543-4757-9a9e-3df501e4b228-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.109659 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkdbg\" (UniqueName: \"kubernetes.io/projected/d4389d06-4543-4757-9a9e-3df501e4b228-kube-api-access-dkdbg\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.238420 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dc0384ad-df86-4939-8c71-92aff217a691","Type":"ContainerStarted","Data":"cd0504f2670cd7e0162348369041d5ec34d9446c5248e514f300b913ce151fd9"} Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.238466 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dc0384ad-df86-4939-8c71-92aff217a691","Type":"ContainerStarted","Data":"df5bbdeedf149a36ab94a1b4c3280e564f9e2b6f48951dac20b8fd0c09dc2ff7"} Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.239393 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.240038 4835 generic.go:334] "Generic (PLEG): container finished" podID="d4389d06-4543-4757-9a9e-3df501e4b228" containerID="cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93" exitCode=0 Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.240079 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d4389d06-4543-4757-9a9e-3df501e4b228","Type":"ContainerDied","Data":"cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93"} Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.240095 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d4389d06-4543-4757-9a9e-3df501e4b228","Type":"ContainerDied","Data":"49d123f44c99f7014ea263da0d5ab1e6e49e19ba8b91c8288e4446f7bc2b33fa"} Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.240110 4835 scope.go:117] "RemoveContainer" 
containerID="cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.240193 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.243688 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8d6a1430-374c-40ad-bab8-dfd817bafe7d","Type":"ContainerDied","Data":"a75374852cff5678010af9d3126f6503b7eb5490f58c1248b3f855ee69ba1a91"} Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.243787 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.253720 4835 generic.go:334] "Generic (PLEG): container finished" podID="123189a5-58c2-406b-877a-e39bba484f73" containerID="3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45" exitCode=0 Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.253762 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerDied","Data":"3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45"} Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.266481 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.893983134 podStartE2EDuration="2.266461883s" podCreationTimestamp="2026-02-02 17:10:44 +0000 UTC" firstStartedPulling="2026-02-02 17:10:45.438004181 +0000 UTC m=+1237.059608261" lastFinishedPulling="2026-02-02 17:10:45.81048293 +0000 UTC m=+1237.432087010" observedRunningTime="2026-02-02 17:10:46.26562707 +0000 UTC m=+1237.887231160" watchObservedRunningTime="2026-02-02 17:10:46.266461883 +0000 UTC m=+1237.888065963" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.269517 4835 scope.go:117] "RemoveContainer" containerID="cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93" Feb 02 17:10:46 crc kubenswrapper[4835]: E0202 17:10:46.272622 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93\": container with ID starting with cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93 not found: ID does not exist" containerID="cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.272679 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93"} err="failed to get container status \"cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93\": rpc error: code = NotFound desc = could not find container \"cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93\": container with ID starting with cf8c759144f8f557a9aadc80c2c26fd6aeffbb77eb9a821ea44c7008edef6f93 not found: ID does not exist" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.272709 4835 scope.go:117] "RemoveContainer" containerID="7e4b098a256c9db5fe63687cb6fa96cbd3064a47fc70b794ca57602a1589365c" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.304144 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.307301 4835 scope.go:117] 
"RemoveContainer" containerID="496a1fb2e2d1cf71e3fbfae681b92df9abb9db0338d991ff94293814400ebf2d" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.328598 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.363112 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:46 crc kubenswrapper[4835]: E0202 17:10:46.363522 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4389d06-4543-4757-9a9e-3df501e4b228" containerName="nova-scheduler-scheduler" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.363538 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4389d06-4543-4757-9a9e-3df501e4b228" containerName="nova-scheduler-scheduler" Feb 02 17:10:46 crc kubenswrapper[4835]: E0202 17:10:46.363555 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-api" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.363563 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-api" Feb 02 17:10:46 crc kubenswrapper[4835]: E0202 17:10:46.363581 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-log" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.363589 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-log" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.363772 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-log" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.363789 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4389d06-4543-4757-9a9e-3df501e4b228" containerName="nova-scheduler-scheduler" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.363800 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" containerName="nova-api-api" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.364429 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.368178 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.376189 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.388311 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.398644 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.407303 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.409049 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.411503 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.418872 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.516574 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46562193-d67c-45b7-97a6-f322c114f91c-logs\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.516627 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-config-data\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.516653 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.516687 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s57wj\" (UniqueName: \"kubernetes.io/projected/1fef1efc-7676-4cc9-b903-7df1ad3f819b-kube-api-access-s57wj\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.516721 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.516763 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-config-data\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.516808 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pspft\" (UniqueName: \"kubernetes.io/projected/46562193-d67c-45b7-97a6-f322c114f91c-kube-api-access-pspft\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.574481 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.574553 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.618532 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pspft\" (UniqueName: 
\"kubernetes.io/projected/46562193-d67c-45b7-97a6-f322c114f91c-kube-api-access-pspft\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.618663 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46562193-d67c-45b7-97a6-f322c114f91c-logs\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.618696 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-config-data\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.618722 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.618759 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s57wj\" (UniqueName: \"kubernetes.io/projected/1fef1efc-7676-4cc9-b903-7df1ad3f819b-kube-api-access-s57wj\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.618783 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.618819 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-config-data\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.620704 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46562193-d67c-45b7-97a6-f322c114f91c-logs\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.622846 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-config-data\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.622932 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.623526 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-config-data\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.623924 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.643137 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pspft\" (UniqueName: \"kubernetes.io/projected/46562193-d67c-45b7-97a6-f322c114f91c-kube-api-access-pspft\") pod \"nova-api-0\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " pod="openstack/nova-api-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.646381 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s57wj\" (UniqueName: \"kubernetes.io/projected/1fef1efc-7676-4cc9-b903-7df1ad3f819b-kube-api-access-s57wj\") pod \"nova-scheduler-0\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.693669 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:10:46 crc kubenswrapper[4835]: I0202 17:10:46.729321 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:10:47 crc kubenswrapper[4835]: W0202 17:10:47.181897 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fef1efc_7676_4cc9_b903_7df1ad3f819b.slice/crio-38323a63666160c00bc9a0eb01170fb4fea428ed26c9435d18c6745ab5b86a43 WatchSource:0}: Error finding container 38323a63666160c00bc9a0eb01170fb4fea428ed26c9435d18c6745ab5b86a43: Status 404 returned error can't find the container with id 38323a63666160c00bc9a0eb01170fb4fea428ed26c9435d18c6745ab5b86a43 Feb 02 17:10:47 crc kubenswrapper[4835]: I0202 17:10:47.184248 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:10:47 crc kubenswrapper[4835]: I0202 17:10:47.203901 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d6a1430-374c-40ad-bab8-dfd817bafe7d" path="/var/lib/kubelet/pods/8d6a1430-374c-40ad-bab8-dfd817bafe7d/volumes" Feb 02 17:10:47 crc kubenswrapper[4835]: I0202 17:10:47.205086 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4389d06-4543-4757-9a9e-3df501e4b228" path="/var/lib/kubelet/pods/d4389d06-4543-4757-9a9e-3df501e4b228/volumes" Feb 02 17:10:47 crc kubenswrapper[4835]: I0202 17:10:47.260199 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:10:47 crc kubenswrapper[4835]: W0202 17:10:47.266393 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46562193_d67c_45b7_97a6_f322c114f91c.slice/crio-9a9795609f8d6d506107e200271d6cec9cedf5fb58378828973f9deedfba5680 WatchSource:0}: Error finding container 9a9795609f8d6d506107e200271d6cec9cedf5fb58378828973f9deedfba5680: Status 404 returned error can't find the container with id 9a9795609f8d6d506107e200271d6cec9cedf5fb58378828973f9deedfba5680 Feb 02 17:10:47 crc kubenswrapper[4835]: I0202 
17:10:47.267675 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1fef1efc-7676-4cc9-b903-7df1ad3f819b","Type":"ContainerStarted","Data":"38323a63666160c00bc9a0eb01170fb4fea428ed26c9435d18c6745ab5b86a43"} Feb 02 17:10:48 crc kubenswrapper[4835]: I0202 17:10:48.280626 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1fef1efc-7676-4cc9-b903-7df1ad3f819b","Type":"ContainerStarted","Data":"7ddcb06a8206a807be0c793bd460849c4117af25ea7cbd4300c14eecc1ff774a"} Feb 02 17:10:48 crc kubenswrapper[4835]: I0202 17:10:48.285171 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"46562193-d67c-45b7-97a6-f322c114f91c","Type":"ContainerStarted","Data":"d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b"} Feb 02 17:10:48 crc kubenswrapper[4835]: I0202 17:10:48.285419 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"46562193-d67c-45b7-97a6-f322c114f91c","Type":"ContainerStarted","Data":"7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342"} Feb 02 17:10:48 crc kubenswrapper[4835]: I0202 17:10:48.285515 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"46562193-d67c-45b7-97a6-f322c114f91c","Type":"ContainerStarted","Data":"9a9795609f8d6d506107e200271d6cec9cedf5fb58378828973f9deedfba5680"} Feb 02 17:10:48 crc kubenswrapper[4835]: I0202 17:10:48.304568 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.304550681 podStartE2EDuration="2.304550681s" podCreationTimestamp="2026-02-02 17:10:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:10:48.30240408 +0000 UTC m=+1239.924008170" watchObservedRunningTime="2026-02-02 17:10:48.304550681 +0000 UTC m=+1239.926154761" Feb 02 17:10:48 crc kubenswrapper[4835]: I0202 17:10:48.331877 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.331859335 podStartE2EDuration="2.331859335s" podCreationTimestamp="2026-02-02 17:10:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:10:48.326685178 +0000 UTC m=+1239.948289258" watchObservedRunningTime="2026-02-02 17:10:48.331859335 +0000 UTC m=+1239.953463415" Feb 02 17:10:49 crc kubenswrapper[4835]: I0202 17:10:49.951819 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.089859 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-combined-ca-bundle\") pod \"123189a5-58c2-406b-877a-e39bba484f73\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.089948 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p78kc\" (UniqueName: \"kubernetes.io/projected/123189a5-58c2-406b-877a-e39bba484f73-kube-api-access-p78kc\") pod \"123189a5-58c2-406b-877a-e39bba484f73\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.090028 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-sg-core-conf-yaml\") pod \"123189a5-58c2-406b-877a-e39bba484f73\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.090081 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-run-httpd\") pod \"123189a5-58c2-406b-877a-e39bba484f73\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.090180 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-log-httpd\") pod \"123189a5-58c2-406b-877a-e39bba484f73\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.090220 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-scripts\") pod \"123189a5-58c2-406b-877a-e39bba484f73\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.090260 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-config-data\") pod \"123189a5-58c2-406b-877a-e39bba484f73\" (UID: \"123189a5-58c2-406b-877a-e39bba484f73\") " Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.090890 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "123189a5-58c2-406b-877a-e39bba484f73" (UID: "123189a5-58c2-406b-877a-e39bba484f73"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.091174 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "123189a5-58c2-406b-877a-e39bba484f73" (UID: "123189a5-58c2-406b-877a-e39bba484f73"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.096114 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-scripts" (OuterVolumeSpecName: "scripts") pod "123189a5-58c2-406b-877a-e39bba484f73" (UID: "123189a5-58c2-406b-877a-e39bba484f73"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.100991 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/123189a5-58c2-406b-877a-e39bba484f73-kube-api-access-p78kc" (OuterVolumeSpecName: "kube-api-access-p78kc") pod "123189a5-58c2-406b-877a-e39bba484f73" (UID: "123189a5-58c2-406b-877a-e39bba484f73"). InnerVolumeSpecName "kube-api-access-p78kc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.139704 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "123189a5-58c2-406b-877a-e39bba484f73" (UID: "123189a5-58c2-406b-877a-e39bba484f73"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.171312 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "123189a5-58c2-406b-877a-e39bba484f73" (UID: "123189a5-58c2-406b-877a-e39bba484f73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.192488 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p78kc\" (UniqueName: \"kubernetes.io/projected/123189a5-58c2-406b-877a-e39bba484f73-kube-api-access-p78kc\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.192527 4835 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.192538 4835 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.192548 4835 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123189a5-58c2-406b-877a-e39bba484f73-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.192561 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.192570 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.194868 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-config-data" (OuterVolumeSpecName: "config-data") pod "123189a5-58c2-406b-877a-e39bba484f73" (UID: "123189a5-58c2-406b-877a-e39bba484f73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.294550 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/123189a5-58c2-406b-877a-e39bba484f73-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.306375 4835 generic.go:334] "Generic (PLEG): container finished" podID="123189a5-58c2-406b-877a-e39bba484f73" containerID="f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27" exitCode=0 Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.306450 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.306454 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerDied","Data":"f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27"} Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.306507 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123189a5-58c2-406b-877a-e39bba484f73","Type":"ContainerDied","Data":"3efe3935b862befc810fb9450de95761aef52c6070a24698792b753bb18ad7fa"} Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.306529 4835 scope.go:117] "RemoveContainer" containerID="c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.334636 4835 scope.go:117] "RemoveContainer" containerID="5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.338501 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.347420 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.358472 4835 scope.go:117] "RemoveContainer" containerID="f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369152 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:50 crc kubenswrapper[4835]: E0202 17:10:50.369627 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="ceilometer-notification-agent" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369651 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="ceilometer-notification-agent" Feb 02 17:10:50 crc kubenswrapper[4835]: E0202 17:10:50.369668 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="sg-core" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369677 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="sg-core" Feb 02 17:10:50 crc kubenswrapper[4835]: E0202 17:10:50.369688 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="123189a5-58c2-406b-877a-e39bba484f73" 
containerName="ceilometer-central-agent" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369694 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="ceilometer-central-agent" Feb 02 17:10:50 crc kubenswrapper[4835]: E0202 17:10:50.369728 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="proxy-httpd" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369735 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="proxy-httpd" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369919 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="proxy-httpd" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369945 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="sg-core" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369961 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="ceilometer-central-agent" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.369977 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="123189a5-58c2-406b-877a-e39bba484f73" containerName="ceilometer-notification-agent" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.372125 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.375893 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.375945 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.376833 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.385424 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.400182 4835 scope.go:117] "RemoveContainer" containerID="3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.420807 4835 scope.go:117] "RemoveContainer" containerID="c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66" Feb 02 17:10:50 crc kubenswrapper[4835]: E0202 17:10:50.421478 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66\": container with ID starting with c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66 not found: ID does not exist" containerID="c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.421511 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66"} err="failed to get container status \"c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66\": rpc error: code = NotFound desc = could not find container 
\"c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66\": container with ID starting with c69f023d0b182f7dcb19df28aae561d139d1da8c214c905c38e8e3a92a3a9f66 not found: ID does not exist" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.421532 4835 scope.go:117] "RemoveContainer" containerID="5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02" Feb 02 17:10:50 crc kubenswrapper[4835]: E0202 17:10:50.421728 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02\": container with ID starting with 5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02 not found: ID does not exist" containerID="5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.421742 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02"} err="failed to get container status \"5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02\": rpc error: code = NotFound desc = could not find container \"5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02\": container with ID starting with 5f63cfe2dea56829230c402f1f31001da0c7203191d5b7f09db8232418efec02 not found: ID does not exist" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.421753 4835 scope.go:117] "RemoveContainer" containerID="f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27" Feb 02 17:10:50 crc kubenswrapper[4835]: E0202 17:10:50.422168 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27\": container with ID starting with f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27 not found: ID does not exist" containerID="f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.422191 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27"} err="failed to get container status \"f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27\": rpc error: code = NotFound desc = could not find container \"f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27\": container with ID starting with f75538860a9f409e2e0d3cadc0e21edd3b77364d04b063f58e8e4b30a83c4a27 not found: ID does not exist" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.422206 4835 scope.go:117] "RemoveContainer" containerID="3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45" Feb 02 17:10:50 crc kubenswrapper[4835]: E0202 17:10:50.422478 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45\": container with ID starting with 3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45 not found: ID does not exist" containerID="3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.422504 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45"} 
err="failed to get container status \"3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45\": rpc error: code = NotFound desc = could not find container \"3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45\": container with ID starting with 3c928d3b5a7d2f24ed287c8f39d59013ee2548fe44b990f7aaa682bc15067c45 not found: ID does not exist" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.498442 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgr24\" (UniqueName: \"kubernetes.io/projected/f083bf49-2c28-4e75-8fe2-8210345db7df-kube-api-access-qgr24\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.498795 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.498936 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-scripts\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.499087 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.499195 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-run-httpd\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.499508 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.499659 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-log-httpd\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.499800 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-config-data\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.601843 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-config-data\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.601963 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgr24\" (UniqueName: \"kubernetes.io/projected/f083bf49-2c28-4e75-8fe2-8210345db7df-kube-api-access-qgr24\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.602011 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.602042 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-scripts\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.602104 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.602126 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-run-httpd\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.602157 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.602218 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-log-httpd\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.602891 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-run-httpd\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.602956 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-log-httpd\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.606718 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-scripts\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.606894 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.607943 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.608724 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.608961 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-config-data\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.625540 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgr24\" (UniqueName: \"kubernetes.io/projected/f083bf49-2c28-4e75-8fe2-8210345db7df-kube-api-access-qgr24\") pod \"ceilometer-0\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " pod="openstack/ceilometer-0" Feb 02 17:10:50 crc kubenswrapper[4835]: I0202 17:10:50.692787 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:10:51 crc kubenswrapper[4835]: I0202 17:10:51.198686 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="123189a5-58c2-406b-877a-e39bba484f73" path="/var/lib/kubelet/pods/123189a5-58c2-406b-877a-e39bba484f73/volumes" Feb 02 17:10:51 crc kubenswrapper[4835]: I0202 17:10:51.200367 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:10:51 crc kubenswrapper[4835]: I0202 17:10:51.318444 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerStarted","Data":"02387692a2e7a66515bcf238df06834cea41d4badd7baa560c83472f4d998708"} Feb 02 17:10:51 crc kubenswrapper[4835]: I0202 17:10:51.574780 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 17:10:51 crc kubenswrapper[4835]: I0202 17:10:51.574858 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 17:10:51 crc kubenswrapper[4835]: I0202 17:10:51.670003 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 02 17:10:51 crc kubenswrapper[4835]: I0202 17:10:51.694699 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 17:10:52 crc kubenswrapper[4835]: I0202 17:10:52.333514 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerStarted","Data":"732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590"} Feb 02 17:10:52 crc kubenswrapper[4835]: I0202 17:10:52.591236 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.182:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 17:10:52 crc kubenswrapper[4835]: I0202 17:10:52.591298 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.182:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 17:10:53 crc kubenswrapper[4835]: I0202 17:10:53.343531 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerStarted","Data":"182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a"} Feb 02 17:10:54 crc kubenswrapper[4835]: I0202 17:10:54.355022 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerStarted","Data":"6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386"} Feb 02 17:10:54 crc kubenswrapper[4835]: I0202 17:10:54.879750 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 02 17:10:56 crc kubenswrapper[4835]: I0202 17:10:56.378393 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerStarted","Data":"97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a"} 
Feb 02 17:10:56 crc kubenswrapper[4835]: I0202 17:10:56.380000 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:10:56 crc kubenswrapper[4835]: I0202 17:10:56.407649 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.649794108 podStartE2EDuration="6.407629159s" podCreationTimestamp="2026-02-02 17:10:50 +0000 UTC" firstStartedPulling="2026-02-02 17:10:51.185103128 +0000 UTC m=+1242.806707208" lastFinishedPulling="2026-02-02 17:10:55.942938179 +0000 UTC m=+1247.564542259" observedRunningTime="2026-02-02 17:10:56.398149061 +0000 UTC m=+1248.019753141" watchObservedRunningTime="2026-02-02 17:10:56.407629159 +0000 UTC m=+1248.029233239" Feb 02 17:10:56 crc kubenswrapper[4835]: I0202 17:10:56.694101 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 17:10:56 crc kubenswrapper[4835]: I0202 17:10:56.719692 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 17:10:56 crc kubenswrapper[4835]: I0202 17:10:56.730032 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 17:10:56 crc kubenswrapper[4835]: I0202 17:10:56.761375 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 17:10:57 crc kubenswrapper[4835]: I0202 17:10:57.426213 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 17:10:57 crc kubenswrapper[4835]: I0202 17:10:57.814459 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 17:10:57 crc kubenswrapper[4835]: I0202 17:10:57.814447 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 17:11:01 crc kubenswrapper[4835]: I0202 17:11:01.579448 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 17:11:01 crc kubenswrapper[4835]: I0202 17:11:01.580133 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 17:11:01 crc kubenswrapper[4835]: I0202 17:11:01.584726 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 17:11:02 crc kubenswrapper[4835]: I0202 17:11:02.436863 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.420350 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.462226 4835 generic.go:334] "Generic (PLEG): container finished" podID="645a2937-2612-4a25-a28b-c77bf1453d7f" containerID="cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be" exitCode=137 Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.462268 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"645a2937-2612-4a25-a28b-c77bf1453d7f","Type":"ContainerDied","Data":"cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be"} Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.462306 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"645a2937-2612-4a25-a28b-c77bf1453d7f","Type":"ContainerDied","Data":"e91f2ded022489a2142543f66e48d303f72639d874f7935a1586528c91c99efd"} Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.462313 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.462323 4835 scope.go:117] "RemoveContainer" containerID="cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.493669 4835 scope.go:117] "RemoveContainer" containerID="cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be" Feb 02 17:11:05 crc kubenswrapper[4835]: E0202 17:11:05.494583 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be\": container with ID starting with cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be not found: ID does not exist" containerID="cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.494623 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be"} err="failed to get container status \"cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be\": rpc error: code = NotFound desc = could not find container \"cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be\": container with ID starting with cb404e564fbfd20359de19154384798a92935e300a4b20cf4ad9313e359096be not found: ID does not exist" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.510162 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-config-data\") pod \"645a2937-2612-4a25-a28b-c77bf1453d7f\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.510250 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx92x\" (UniqueName: \"kubernetes.io/projected/645a2937-2612-4a25-a28b-c77bf1453d7f-kube-api-access-jx92x\") pod \"645a2937-2612-4a25-a28b-c77bf1453d7f\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.510483 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-combined-ca-bundle\") pod 
\"645a2937-2612-4a25-a28b-c77bf1453d7f\" (UID: \"645a2937-2612-4a25-a28b-c77bf1453d7f\") " Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.516313 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/645a2937-2612-4a25-a28b-c77bf1453d7f-kube-api-access-jx92x" (OuterVolumeSpecName: "kube-api-access-jx92x") pod "645a2937-2612-4a25-a28b-c77bf1453d7f" (UID: "645a2937-2612-4a25-a28b-c77bf1453d7f"). InnerVolumeSpecName "kube-api-access-jx92x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.543606 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-config-data" (OuterVolumeSpecName: "config-data") pod "645a2937-2612-4a25-a28b-c77bf1453d7f" (UID: "645a2937-2612-4a25-a28b-c77bf1453d7f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.546671 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "645a2937-2612-4a25-a28b-c77bf1453d7f" (UID: "645a2937-2612-4a25-a28b-c77bf1453d7f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.612738 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.612770 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx92x\" (UniqueName: \"kubernetes.io/projected/645a2937-2612-4a25-a28b-c77bf1453d7f-kube-api-access-jx92x\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.612783 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/645a2937-2612-4a25-a28b-c77bf1453d7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.798505 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.808730 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.820315 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:11:05 crc kubenswrapper[4835]: E0202 17:11:05.820763 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="645a2937-2612-4a25-a28b-c77bf1453d7f" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.820787 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="645a2937-2612-4a25-a28b-c77bf1453d7f" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.821005 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="645a2937-2612-4a25-a28b-c77bf1453d7f" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.823911 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.826683 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.827081 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.833323 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.839168 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.918006 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.918054 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.918248 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65p47\" (UniqueName: \"kubernetes.io/projected/dde1c61e-1816-44bc-b1bc-9e1545987087-kube-api-access-65p47\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.918302 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:05 crc kubenswrapper[4835]: I0202 17:11:05.918330 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.020071 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.020113 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 
17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.020234 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65p47\" (UniqueName: \"kubernetes.io/projected/dde1c61e-1816-44bc-b1bc-9e1545987087-kube-api-access-65p47\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.020259 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.020300 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.024486 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.025057 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.026192 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.028071 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dde1c61e-1816-44bc-b1bc-9e1545987087-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.038862 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65p47\" (UniqueName: \"kubernetes.io/projected/dde1c61e-1816-44bc-b1bc-9e1545987087-kube-api-access-65p47\") pod \"nova-cell1-novncproxy-0\" (UID: \"dde1c61e-1816-44bc-b1bc-9e1545987087\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.162500 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.617977 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.734142 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.734832 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.735217 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.735243 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.738007 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.738177 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.932431 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-k4zxs"] Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.935684 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:06 crc kubenswrapper[4835]: I0202 17:11:06.951402 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-k4zxs"] Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.037490 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-config\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.037551 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.037589 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.037636 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56rp6\" (UniqueName: \"kubernetes.io/projected/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-kube-api-access-56rp6\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.037697 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.139018 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.139118 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-config\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.139161 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.139184 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.139209 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56rp6\" (UniqueName: \"kubernetes.io/projected/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-kube-api-access-56rp6\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.140501 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.140585 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-config\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.141071 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.142251 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: 
\"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.157464 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56rp6\" (UniqueName: \"kubernetes.io/projected/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-kube-api-access-56rp6\") pod \"dnsmasq-dns-68d4b6d797-k4zxs\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.198729 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="645a2937-2612-4a25-a28b-c77bf1453d7f" path="/var/lib/kubelet/pods/645a2937-2612-4a25-a28b-c77bf1453d7f/volumes" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.258412 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.497158 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dde1c61e-1816-44bc-b1bc-9e1545987087","Type":"ContainerStarted","Data":"021c467ead3ca260bdd2c647d80096623d40f6a1ccd5491b51a4056fb46f1928"} Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.497219 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dde1c61e-1816-44bc-b1bc-9e1545987087","Type":"ContainerStarted","Data":"9ce6cbdd07135e77c594ff0fa131b42b2c253381659591f05519dfee32ed9534"} Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.531569 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.531547845 podStartE2EDuration="2.531547845s" podCreationTimestamp="2026-02-02 17:11:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:11:07.516687974 +0000 UTC m=+1259.138292074" watchObservedRunningTime="2026-02-02 17:11:07.531547845 +0000 UTC m=+1259.153151925" Feb 02 17:11:07 crc kubenswrapper[4835]: I0202 17:11:07.774077 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-k4zxs"] Feb 02 17:11:08 crc kubenswrapper[4835]: I0202 17:11:08.507771 4835 generic.go:334] "Generic (PLEG): container finished" podID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerID="83ba2b8c03ea8252292c077d3b16456a34e3f5aed4e984d9e9b3e75ae79c4a10" exitCode=0 Feb 02 17:11:08 crc kubenswrapper[4835]: I0202 17:11:08.508422 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" event={"ID":"95e4982e-b72d-4b5e-9c32-cbda57e9d23b","Type":"ContainerDied","Data":"83ba2b8c03ea8252292c077d3b16456a34e3f5aed4e984d9e9b3e75ae79c4a10"} Feb 02 17:11:08 crc kubenswrapper[4835]: I0202 17:11:08.508557 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" event={"ID":"95e4982e-b72d-4b5e-9c32-cbda57e9d23b","Type":"ContainerStarted","Data":"999fb1c4d55e8be2f36e97293d232e30de81f5ea4eae768ef8d4c00ee95eb6ca"} Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.211693 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.468791 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.469141 4835 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/ceilometer-0" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="ceilometer-central-agent" containerID="cri-o://732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590" gracePeriod=30 Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.469268 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="ceilometer-notification-agent" containerID="cri-o://182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a" gracePeriod=30 Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.469246 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="sg-core" containerID="cri-o://6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386" gracePeriod=30 Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.469426 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="proxy-httpd" containerID="cri-o://97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a" gracePeriod=30 Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.520234 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" event={"ID":"95e4982e-b72d-4b5e-9c32-cbda57e9d23b","Type":"ContainerStarted","Data":"c5f663722c1dc0dc104ee1222f1650be13c2c806f554ad72864793c60c7b8159"} Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.520386 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-log" containerID="cri-o://7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342" gracePeriod=30 Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.520463 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-api" containerID="cri-o://d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b" gracePeriod=30 Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.544692 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" podStartSLOduration=3.544671166 podStartE2EDuration="3.544671166s" podCreationTimestamp="2026-02-02 17:11:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:11:09.539464799 +0000 UTC m=+1261.161068889" watchObservedRunningTime="2026-02-02 17:11:09.544671166 +0000 UTC m=+1261.166275246" Feb 02 17:11:09 crc kubenswrapper[4835]: I0202 17:11:09.571300 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.187:3000/\": read tcp 10.217.0.2:34010->10.217.0.187:3000: read: connection reset by peer" Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.531694 4835 generic.go:334] "Generic (PLEG): container finished" podID="46562193-d67c-45b7-97a6-f322c114f91c" containerID="7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342" exitCode=143 Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.531787 4835 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"46562193-d67c-45b7-97a6-f322c114f91c","Type":"ContainerDied","Data":"7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342"} Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.534861 4835 generic.go:334] "Generic (PLEG): container finished" podID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerID="97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a" exitCode=0 Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.534894 4835 generic.go:334] "Generic (PLEG): container finished" podID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerID="6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386" exitCode=2 Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.534901 4835 generic.go:334] "Generic (PLEG): container finished" podID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerID="732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590" exitCode=0 Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.534910 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerDied","Data":"97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a"} Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.534962 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerDied","Data":"6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386"} Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.535168 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerDied","Data":"732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590"} Feb 02 17:11:10 crc kubenswrapper[4835]: I0202 17:11:10.535463 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:11 crc kubenswrapper[4835]: I0202 17:11:11.162964 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.108053 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.275150 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46562193-d67c-45b7-97a6-f322c114f91c-logs\") pod \"46562193-d67c-45b7-97a6-f322c114f91c\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.275211 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pspft\" (UniqueName: \"kubernetes.io/projected/46562193-d67c-45b7-97a6-f322c114f91c-kube-api-access-pspft\") pod \"46562193-d67c-45b7-97a6-f322c114f91c\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.275406 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-combined-ca-bundle\") pod \"46562193-d67c-45b7-97a6-f322c114f91c\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.275515 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-config-data\") pod \"46562193-d67c-45b7-97a6-f322c114f91c\" (UID: \"46562193-d67c-45b7-97a6-f322c114f91c\") " Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.277078 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46562193-d67c-45b7-97a6-f322c114f91c-logs" (OuterVolumeSpecName: "logs") pod "46562193-d67c-45b7-97a6-f322c114f91c" (UID: "46562193-d67c-45b7-97a6-f322c114f91c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.282672 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46562193-d67c-45b7-97a6-f322c114f91c-kube-api-access-pspft" (OuterVolumeSpecName: "kube-api-access-pspft") pod "46562193-d67c-45b7-97a6-f322c114f91c" (UID: "46562193-d67c-45b7-97a6-f322c114f91c"). InnerVolumeSpecName "kube-api-access-pspft". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.310877 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-config-data" (OuterVolumeSpecName: "config-data") pod "46562193-d67c-45b7-97a6-f322c114f91c" (UID: "46562193-d67c-45b7-97a6-f322c114f91c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.315558 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46562193-d67c-45b7-97a6-f322c114f91c" (UID: "46562193-d67c-45b7-97a6-f322c114f91c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.377415 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46562193-d67c-45b7-97a6-f322c114f91c-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.377443 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pspft\" (UniqueName: \"kubernetes.io/projected/46562193-d67c-45b7-97a6-f322c114f91c-kube-api-access-pspft\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.377455 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.377463 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46562193-d67c-45b7-97a6-f322c114f91c-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.562883 4835 generic.go:334] "Generic (PLEG): container finished" podID="46562193-d67c-45b7-97a6-f322c114f91c" containerID="d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b" exitCode=0 Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.562922 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"46562193-d67c-45b7-97a6-f322c114f91c","Type":"ContainerDied","Data":"d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b"} Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.562947 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"46562193-d67c-45b7-97a6-f322c114f91c","Type":"ContainerDied","Data":"9a9795609f8d6d506107e200271d6cec9cedf5fb58378828973f9deedfba5680"} Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.562963 4835 scope.go:117] "RemoveContainer" containerID="d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.563078 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.586522 4835 scope.go:117] "RemoveContainer" containerID="7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.596555 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.606852 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.609344 4835 scope.go:117] "RemoveContainer" containerID="d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b" Feb 02 17:11:13 crc kubenswrapper[4835]: E0202 17:11:13.610012 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b\": container with ID starting with d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b not found: ID does not exist" containerID="d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.610063 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b"} err="failed to get container status \"d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b\": rpc error: code = NotFound desc = could not find container \"d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b\": container with ID starting with d2ba7bca7bb7554f2c0ace55d85a0edc073da28cb9cb19052f6bfb7b20d11f3b not found: ID does not exist" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.610096 4835 scope.go:117] "RemoveContainer" containerID="7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342" Feb 02 17:11:13 crc kubenswrapper[4835]: E0202 17:11:13.610548 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342\": container with ID starting with 7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342 not found: ID does not exist" containerID="7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.610587 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342"} err="failed to get container status \"7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342\": rpc error: code = NotFound desc = could not find container \"7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342\": container with ID starting with 7072518293c86d47d0974e48ca8ceef2ab145b418db229173421fd8b3604b342 not found: ID does not exist" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.628766 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:13 crc kubenswrapper[4835]: E0202 17:11:13.629947 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-log" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.629976 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-log" Feb 02 17:11:13 crc 
kubenswrapper[4835]: E0202 17:11:13.630024 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-api" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.630034 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-api" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.630221 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-log" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.630256 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="46562193-d67c-45b7-97a6-f322c114f91c" containerName="nova-api-api" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.631214 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.633396 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.633775 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.635570 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.645613 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.784471 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b706cf55-d931-4ac1-83a7-37ccf678bceb-logs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.784884 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l25df\" (UniqueName: \"kubernetes.io/projected/b706cf55-d931-4ac1-83a7-37ccf678bceb-kube-api-access-l25df\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.784908 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-public-tls-certs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.784940 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-config-data\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.784986 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.785008 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.886667 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b706cf55-d931-4ac1-83a7-37ccf678bceb-logs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.886803 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l25df\" (UniqueName: \"kubernetes.io/projected/b706cf55-d931-4ac1-83a7-37ccf678bceb-kube-api-access-l25df\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.886836 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-public-tls-certs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.886888 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-config-data\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.886963 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.886993 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.887424 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b706cf55-d931-4ac1-83a7-37ccf678bceb-logs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.891942 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.892015 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.892641 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-public-tls-certs\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.893177 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-config-data\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.904076 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l25df\" (UniqueName: \"kubernetes.io/projected/b706cf55-d931-4ac1-83a7-37ccf678bceb-kube-api-access-l25df\") pod \"nova-api-0\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " pod="openstack/nova-api-0" Feb 02 17:11:13 crc kubenswrapper[4835]: I0202 17:11:13.947729 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.102936 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.195679 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-combined-ca-bundle\") pod \"f083bf49-2c28-4e75-8fe2-8210345db7df\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.196514 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-sg-core-conf-yaml\") pod \"f083bf49-2c28-4e75-8fe2-8210345db7df\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.196576 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-scripts\") pod \"f083bf49-2c28-4e75-8fe2-8210345db7df\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.196616 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgr24\" (UniqueName: \"kubernetes.io/projected/f083bf49-2c28-4e75-8fe2-8210345db7df-kube-api-access-qgr24\") pod \"f083bf49-2c28-4e75-8fe2-8210345db7df\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.196649 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-ceilometer-tls-certs\") pod \"f083bf49-2c28-4e75-8fe2-8210345db7df\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.197243 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-log-httpd\") pod \"f083bf49-2c28-4e75-8fe2-8210345db7df\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.197303 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-run-httpd\") pod \"f083bf49-2c28-4e75-8fe2-8210345db7df\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.197461 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-config-data\") pod \"f083bf49-2c28-4e75-8fe2-8210345db7df\" (UID: \"f083bf49-2c28-4e75-8fe2-8210345db7df\") " Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.198875 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f083bf49-2c28-4e75-8fe2-8210345db7df" (UID: "f083bf49-2c28-4e75-8fe2-8210345db7df"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.199103 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f083bf49-2c28-4e75-8fe2-8210345db7df" (UID: "f083bf49-2c28-4e75-8fe2-8210345db7df"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.203822 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f083bf49-2c28-4e75-8fe2-8210345db7df-kube-api-access-qgr24" (OuterVolumeSpecName: "kube-api-access-qgr24") pod "f083bf49-2c28-4e75-8fe2-8210345db7df" (UID: "f083bf49-2c28-4e75-8fe2-8210345db7df"). InnerVolumeSpecName "kube-api-access-qgr24". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.222066 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-scripts" (OuterVolumeSpecName: "scripts") pod "f083bf49-2c28-4e75-8fe2-8210345db7df" (UID: "f083bf49-2c28-4e75-8fe2-8210345db7df"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.224132 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.224164 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgr24\" (UniqueName: \"kubernetes.io/projected/f083bf49-2c28-4e75-8fe2-8210345db7df-kube-api-access-qgr24\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.224175 4835 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.224184 4835 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f083bf49-2c28-4e75-8fe2-8210345db7df-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.233419 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f083bf49-2c28-4e75-8fe2-8210345db7df" (UID: "f083bf49-2c28-4e75-8fe2-8210345db7df"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.262875 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f083bf49-2c28-4e75-8fe2-8210345db7df" (UID: "f083bf49-2c28-4e75-8fe2-8210345db7df"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.282092 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f083bf49-2c28-4e75-8fe2-8210345db7df" (UID: "f083bf49-2c28-4e75-8fe2-8210345db7df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.312537 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-config-data" (OuterVolumeSpecName: "config-data") pod "f083bf49-2c28-4e75-8fe2-8210345db7df" (UID: "f083bf49-2c28-4e75-8fe2-8210345db7df"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.325447 4835 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.325478 4835 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.325492 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.325503 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f083bf49-2c28-4e75-8fe2-8210345db7df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.413810 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:14 crc kubenswrapper[4835]: W0202 17:11:14.418661 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb706cf55_d931_4ac1_83a7_37ccf678bceb.slice/crio-9ed628909f8de530aa46b731ce4dc269689a4ef9b979486f391828b1fdc5b2f8 WatchSource:0}: Error finding container 9ed628909f8de530aa46b731ce4dc269689a4ef9b979486f391828b1fdc5b2f8: Status 404 returned error can't find the container with id 9ed628909f8de530aa46b731ce4dc269689a4ef9b979486f391828b1fdc5b2f8 Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.573188 4835 generic.go:334] "Generic (PLEG): container finished" podID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerID="182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a" exitCode=0 Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.573327 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerDied","Data":"182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a"} Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.573549 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f083bf49-2c28-4e75-8fe2-8210345db7df","Type":"ContainerDied","Data":"02387692a2e7a66515bcf238df06834cea41d4badd7baa560c83472f4d998708"} Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.573568 4835 scope.go:117] "RemoveContainer" containerID="97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.573432 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.584529 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b706cf55-d931-4ac1-83a7-37ccf678bceb","Type":"ContainerStarted","Data":"9ed628909f8de530aa46b731ce4dc269689a4ef9b979486f391828b1fdc5b2f8"} Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.605690 4835 scope.go:117] "RemoveContainer" containerID="6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.620644 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.640064 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.652834 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:11:14 crc kubenswrapper[4835]: E0202 17:11:14.653207 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="ceilometer-central-agent" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.653222 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="ceilometer-central-agent" Feb 02 17:11:14 crc kubenswrapper[4835]: E0202 17:11:14.653235 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="ceilometer-notification-agent" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.653241 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="ceilometer-notification-agent" Feb 02 17:11:14 crc kubenswrapper[4835]: E0202 17:11:14.653263 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="sg-core" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.653268 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="sg-core" Feb 02 17:11:14 crc kubenswrapper[4835]: E0202 17:11:14.653291 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="proxy-httpd" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.653296 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="proxy-httpd" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.653478 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="sg-core" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.653489 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="ceilometer-central-agent" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.653502 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="ceilometer-notification-agent" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.653511 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" containerName="proxy-httpd" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.654973 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.658121 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.658441 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.658634 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.660632 4835 scope.go:117] "RemoveContainer" containerID="182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.661089 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.694926 4835 scope.go:117] "RemoveContainer" containerID="732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.721150 4835 scope.go:117] "RemoveContainer" containerID="97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a" Feb 02 17:11:14 crc kubenswrapper[4835]: E0202 17:11:14.721755 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a\": container with ID starting with 97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a not found: ID does not exist" containerID="97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.721791 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a"} err="failed to get container status \"97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a\": rpc error: code = NotFound desc = could not find container \"97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a\": container with ID starting with 97373b38d04093239ba74951a413319c49c72e69c4ee86aa78ecca2fba51315a not found: ID does not exist" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.721834 4835 scope.go:117] "RemoveContainer" containerID="6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386" Feb 02 17:11:14 crc kubenswrapper[4835]: E0202 17:11:14.722158 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386\": container with ID starting with 6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386 not found: ID does not exist" containerID="6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.722183 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386"} err="failed to get container status \"6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386\": rpc error: code = NotFound desc = could not find container \"6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386\": container with ID starting with 6d7252995028174f71c306dddfc7128ca8a1362c81b34f73359886a89ef53386 not found: ID does not exist" Feb 02 17:11:14 
crc kubenswrapper[4835]: I0202 17:11:14.722196 4835 scope.go:117] "RemoveContainer" containerID="182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a" Feb 02 17:11:14 crc kubenswrapper[4835]: E0202 17:11:14.722490 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a\": container with ID starting with 182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a not found: ID does not exist" containerID="182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.722535 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a"} err="failed to get container status \"182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a\": rpc error: code = NotFound desc = could not find container \"182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a\": container with ID starting with 182b7cf6bee6f071743d1deb1f42ed99a0f8b7533e37cd56862ba0b32679e81a not found: ID does not exist" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.722569 4835 scope.go:117] "RemoveContainer" containerID="732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590" Feb 02 17:11:14 crc kubenswrapper[4835]: E0202 17:11:14.722921 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590\": container with ID starting with 732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590 not found: ID does not exist" containerID="732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.722948 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590"} err="failed to get container status \"732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590\": rpc error: code = NotFound desc = could not find container \"732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590\": container with ID starting with 732a2c399e5ac8c3120191ffc4d79813723ea35dde9a8a639f9f5c19c1320590 not found: ID does not exist" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.732450 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.732531 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-log-httpd\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.732579 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-run-httpd\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " 
pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.732607 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.732674 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.732719 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-scripts\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.732740 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2tdv\" (UniqueName: \"kubernetes.io/projected/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-kube-api-access-v2tdv\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.732909 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-config-data\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.833834 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.833884 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-scripts\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.833902 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2tdv\" (UniqueName: \"kubernetes.io/projected/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-kube-api-access-v2tdv\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.833943 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-config-data\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.833971 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.834012 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-log-httpd\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.834042 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-run-httpd\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.834063 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.835120 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-log-httpd\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.835347 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-run-httpd\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.838895 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.839374 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.840112 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-scripts\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.851209 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-config-data\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.851418 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.855379 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2tdv\" (UniqueName: \"kubernetes.io/projected/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-kube-api-access-v2tdv\") pod \"ceilometer-0\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " pod="openstack/ceilometer-0" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.872154 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.872215 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.872290 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.873052 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d1d745ca83c0b5216f384fd386fa76fd9b97cc7c8d5d53ff568a50a85b837b86"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:11:14 crc kubenswrapper[4835]: I0202 17:11:14.873116 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://d1d745ca83c0b5216f384fd386fa76fd9b97cc7c8d5d53ff568a50a85b837b86" gracePeriod=600 Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.012343 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.202998 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46562193-d67c-45b7-97a6-f322c114f91c" path="/var/lib/kubelet/pods/46562193-d67c-45b7-97a6-f322c114f91c/volumes" Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.204515 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f083bf49-2c28-4e75-8fe2-8210345db7df" path="/var/lib/kubelet/pods/f083bf49-2c28-4e75-8fe2-8210345db7df/volumes" Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.457554 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:11:15 crc kubenswrapper[4835]: W0202 17:11:15.469073 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ef884a8_061e_4f93_b4c9_9149a5f10f9f.slice/crio-56c3fc9a64700931866c158be2d0c0153dd5c4e2ae380270634a8562ba56a722 WatchSource:0}: Error finding container 56c3fc9a64700931866c158be2d0c0153dd5c4e2ae380270634a8562ba56a722: Status 404 returned error can't find the container with id 56c3fc9a64700931866c158be2d0c0153dd5c4e2ae380270634a8562ba56a722 Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.606594 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="d1d745ca83c0b5216f384fd386fa76fd9b97cc7c8d5d53ff568a50a85b837b86" exitCode=0 Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.606662 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"d1d745ca83c0b5216f384fd386fa76fd9b97cc7c8d5d53ff568a50a85b837b86"} Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.606690 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"50373b29385bc4901c51a5e8702a6916b31f719329b1f21a631ab633cc9521bd"} Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.606706 4835 scope.go:117] "RemoveContainer" containerID="ed5bb6b3343a006060ae2f0f9c428cf6f417413f7227d48031553b98961dab3a" Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.608618 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerStarted","Data":"56c3fc9a64700931866c158be2d0c0153dd5c4e2ae380270634a8562ba56a722"} Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.613993 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b706cf55-d931-4ac1-83a7-37ccf678bceb","Type":"ContainerStarted","Data":"d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c"} Feb 02 17:11:15 crc kubenswrapper[4835]: I0202 17:11:15.614056 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b706cf55-d931-4ac1-83a7-37ccf678bceb","Type":"ContainerStarted","Data":"58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150"} Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.163695 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.183390 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.208574 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.208550766 podStartE2EDuration="3.208550766s" podCreationTimestamp="2026-02-02 17:11:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:11:15.650590535 +0000 UTC m=+1267.272194625" watchObservedRunningTime="2026-02-02 17:11:16.208550766 +0000 UTC m=+1267.830154846" Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.628825 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerStarted","Data":"c72ca7ce364d4eb3149609bd6bfee1475b893e8d462415f7bb96626f8441952e"} Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.649055 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.890204 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-f44hv"] Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.891550 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.894039 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.894197 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 02 17:11:16 crc kubenswrapper[4835]: I0202 17:11:16.900960 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-f44hv"] Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.079063 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-scripts\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.079438 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9w8m\" (UniqueName: \"kubernetes.io/projected/7b693642-0cee-4d02-b938-4a1fc245e8a0-kube-api-access-j9w8m\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.079516 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-config-data\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.079571 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " 
pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.181816 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-config-data\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.182008 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.182102 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-scripts\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.182174 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9w8m\" (UniqueName: \"kubernetes.io/projected/7b693642-0cee-4d02-b938-4a1fc245e8a0-kube-api-access-j9w8m\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.190379 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-config-data\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.192579 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.194615 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-scripts\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.202651 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9w8m\" (UniqueName: \"kubernetes.io/projected/7b693642-0cee-4d02-b938-4a1fc245e8a0-kube-api-access-j9w8m\") pod \"nova-cell1-cell-mapping-f44hv\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.259849 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.260453 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.333014 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-7krzf"] Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.333264 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" podUID="413f9f5d-9076-437b-a5b6-0b7404b81446" containerName="dnsmasq-dns" containerID="cri-o://871df2af5aa304822677e07946e6b6fa70558c987572e12e9d45067660893a1c" gracePeriod=10 Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.643523 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerStarted","Data":"3daad7985b2b993065441bb8348f7de7403d33ae16d6c1b6b71129c87b3f5105"} Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.646489 4835 generic.go:334] "Generic (PLEG): container finished" podID="413f9f5d-9076-437b-a5b6-0b7404b81446" containerID="871df2af5aa304822677e07946e6b6fa70558c987572e12e9d45067660893a1c" exitCode=0 Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.646865 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" event={"ID":"413f9f5d-9076-437b-a5b6-0b7404b81446","Type":"ContainerDied","Data":"871df2af5aa304822677e07946e6b6fa70558c987572e12e9d45067660893a1c"} Feb 02 17:11:17 crc kubenswrapper[4835]: I0202 17:11:17.775901 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-f44hv"] Feb 02 17:11:17 crc kubenswrapper[4835]: W0202 17:11:17.795353 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b693642_0cee_4d02_b938_4a1fc245e8a0.slice/crio-684664ad310262fa8248753e986bb679665a7ede9cee00f02e3e142d21391307 WatchSource:0}: Error finding container 684664ad310262fa8248753e986bb679665a7ede9cee00f02e3e142d21391307: Status 404 returned error can't find the container with id 684664ad310262fa8248753e986bb679665a7ede9cee00f02e3e142d21391307 Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.039939 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.210925 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-nb\") pod \"413f9f5d-9076-437b-a5b6-0b7404b81446\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.210994 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-config\") pod \"413f9f5d-9076-437b-a5b6-0b7404b81446\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.211045 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-dns-svc\") pod \"413f9f5d-9076-437b-a5b6-0b7404b81446\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.211538 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-sb\") pod \"413f9f5d-9076-437b-a5b6-0b7404b81446\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.211637 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-988vj\" (UniqueName: \"kubernetes.io/projected/413f9f5d-9076-437b-a5b6-0b7404b81446-kube-api-access-988vj\") pod \"413f9f5d-9076-437b-a5b6-0b7404b81446\" (UID: \"413f9f5d-9076-437b-a5b6-0b7404b81446\") " Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.223546 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/413f9f5d-9076-437b-a5b6-0b7404b81446-kube-api-access-988vj" (OuterVolumeSpecName: "kube-api-access-988vj") pod "413f9f5d-9076-437b-a5b6-0b7404b81446" (UID: "413f9f5d-9076-437b-a5b6-0b7404b81446"). InnerVolumeSpecName "kube-api-access-988vj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.254109 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-config" (OuterVolumeSpecName: "config") pod "413f9f5d-9076-437b-a5b6-0b7404b81446" (UID: "413f9f5d-9076-437b-a5b6-0b7404b81446"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.259321 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "413f9f5d-9076-437b-a5b6-0b7404b81446" (UID: "413f9f5d-9076-437b-a5b6-0b7404b81446"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.259910 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "413f9f5d-9076-437b-a5b6-0b7404b81446" (UID: "413f9f5d-9076-437b-a5b6-0b7404b81446"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.264593 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "413f9f5d-9076-437b-a5b6-0b7404b81446" (UID: "413f9f5d-9076-437b-a5b6-0b7404b81446"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.314496 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.314531 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-988vj\" (UniqueName: \"kubernetes.io/projected/413f9f5d-9076-437b-a5b6-0b7404b81446-kube-api-access-988vj\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.314541 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.314551 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.314559 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/413f9f5d-9076-437b-a5b6-0b7404b81446-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.655885 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerStarted","Data":"645e2f4c25377d72277822fc61cb2669bfd3fad0f5444a02d456f99875ee0f4f"} Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.657741 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-f44hv" event={"ID":"7b693642-0cee-4d02-b938-4a1fc245e8a0","Type":"ContainerStarted","Data":"4aa4ff3b7299038323b1660bf7af78493b749ca57ee87fb124941fd7b4597032"} Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.657770 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-f44hv" event={"ID":"7b693642-0cee-4d02-b938-4a1fc245e8a0","Type":"ContainerStarted","Data":"684664ad310262fa8248753e986bb679665a7ede9cee00f02e3e142d21391307"} Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.661186 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" event={"ID":"413f9f5d-9076-437b-a5b6-0b7404b81446","Type":"ContainerDied","Data":"3f7fb67d2d02510842811aee61261f7c2d1f373b8ac4c465ca1683502771b84a"} Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.661218 4835 scope.go:117] "RemoveContainer" containerID="871df2af5aa304822677e07946e6b6fa70558c987572e12e9d45067660893a1c" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.661324 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-7krzf" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.684815 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-f44hv" podStartSLOduration=2.684795894 podStartE2EDuration="2.684795894s" podCreationTimestamp="2026-02-02 17:11:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:11:18.680200244 +0000 UTC m=+1270.301804324" watchObservedRunningTime="2026-02-02 17:11:18.684795894 +0000 UTC m=+1270.306399974" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.692214 4835 scope.go:117] "RemoveContainer" containerID="4d14276e7078fb1ae5469cb0c2dc4f0ceb615e092b9a00e8c2df7341e52e38b9" Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.703885 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-7krzf"] Feb 02 17:11:18 crc kubenswrapper[4835]: I0202 17:11:18.713653 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-7krzf"] Feb 02 17:11:19 crc kubenswrapper[4835]: I0202 17:11:19.200687 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="413f9f5d-9076-437b-a5b6-0b7404b81446" path="/var/lib/kubelet/pods/413f9f5d-9076-437b-a5b6-0b7404b81446/volumes" Feb 02 17:11:20 crc kubenswrapper[4835]: I0202 17:11:20.691784 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerStarted","Data":"30d59ac2b3a641c423176b98f72e51d9580fd2dbf22150c021d02826e8fee721"} Feb 02 17:11:20 crc kubenswrapper[4835]: I0202 17:11:20.692375 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:11:20 crc kubenswrapper[4835]: I0202 17:11:20.718673 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.007894273 podStartE2EDuration="6.718652902s" podCreationTimestamp="2026-02-02 17:11:14 +0000 UTC" firstStartedPulling="2026-02-02 17:11:15.473217821 +0000 UTC m=+1267.094821911" lastFinishedPulling="2026-02-02 17:11:20.18397646 +0000 UTC m=+1271.805580540" observedRunningTime="2026-02-02 17:11:20.717014866 +0000 UTC m=+1272.338618976" watchObservedRunningTime="2026-02-02 17:11:20.718652902 +0000 UTC m=+1272.340256982" Feb 02 17:11:23 crc kubenswrapper[4835]: I0202 17:11:23.722364 4835 generic.go:334] "Generic (PLEG): container finished" podID="7b693642-0cee-4d02-b938-4a1fc245e8a0" containerID="4aa4ff3b7299038323b1660bf7af78493b749ca57ee87fb124941fd7b4597032" exitCode=0 Feb 02 17:11:23 crc kubenswrapper[4835]: I0202 17:11:23.722459 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-f44hv" event={"ID":"7b693642-0cee-4d02-b938-4a1fc245e8a0","Type":"ContainerDied","Data":"4aa4ff3b7299038323b1660bf7af78493b749ca57ee87fb124941fd7b4597032"} Feb 02 17:11:23 crc kubenswrapper[4835]: I0202 17:11:23.949780 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 17:11:23 crc kubenswrapper[4835]: I0202 17:11:23.949845 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 17:11:24 crc kubenswrapper[4835]: I0202 17:11:24.964477 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.190:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 17:11:24 crc kubenswrapper[4835]: I0202 17:11:24.964511 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.190:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.081389 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.241595 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-config-data\") pod \"7b693642-0cee-4d02-b938-4a1fc245e8a0\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.241789 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9w8m\" (UniqueName: \"kubernetes.io/projected/7b693642-0cee-4d02-b938-4a1fc245e8a0-kube-api-access-j9w8m\") pod \"7b693642-0cee-4d02-b938-4a1fc245e8a0\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.241859 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-scripts\") pod \"7b693642-0cee-4d02-b938-4a1fc245e8a0\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.242029 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-combined-ca-bundle\") pod \"7b693642-0cee-4d02-b938-4a1fc245e8a0\" (UID: \"7b693642-0cee-4d02-b938-4a1fc245e8a0\") " Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.252436 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-scripts" (OuterVolumeSpecName: "scripts") pod "7b693642-0cee-4d02-b938-4a1fc245e8a0" (UID: "7b693642-0cee-4d02-b938-4a1fc245e8a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.252549 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b693642-0cee-4d02-b938-4a1fc245e8a0-kube-api-access-j9w8m" (OuterVolumeSpecName: "kube-api-access-j9w8m") pod "7b693642-0cee-4d02-b938-4a1fc245e8a0" (UID: "7b693642-0cee-4d02-b938-4a1fc245e8a0"). InnerVolumeSpecName "kube-api-access-j9w8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.274097 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b693642-0cee-4d02-b938-4a1fc245e8a0" (UID: "7b693642-0cee-4d02-b938-4a1fc245e8a0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.278014 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-config-data" (OuterVolumeSpecName: "config-data") pod "7b693642-0cee-4d02-b938-4a1fc245e8a0" (UID: "7b693642-0cee-4d02-b938-4a1fc245e8a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.344472 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.344501 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.344512 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b693642-0cee-4d02-b938-4a1fc245e8a0-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.344521 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9w8m\" (UniqueName: \"kubernetes.io/projected/7b693642-0cee-4d02-b938-4a1fc245e8a0-kube-api-access-j9w8m\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.745036 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-f44hv" event={"ID":"7b693642-0cee-4d02-b938-4a1fc245e8a0","Type":"ContainerDied","Data":"684664ad310262fa8248753e986bb679665a7ede9cee00f02e3e142d21391307"} Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.745074 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="684664ad310262fa8248753e986bb679665a7ede9cee00f02e3e142d21391307" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.745123 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-f44hv" Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.902077 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.902492 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1fef1efc-7676-4cc9-b903-7df1ad3f819b" containerName="nova-scheduler-scheduler" containerID="cri-o://7ddcb06a8206a807be0c793bd460849c4117af25ea7cbd4300c14eecc1ff774a" gracePeriod=30 Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.912426 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.912653 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-log" containerID="cri-o://d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c" gracePeriod=30 Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.912792 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-api" containerID="cri-o://58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150" gracePeriod=30 Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.934013 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.934264 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-log" containerID="cri-o://65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d" gracePeriod=30 Feb 02 17:11:25 crc kubenswrapper[4835]: I0202 17:11:25.934433 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-metadata" containerID="cri-o://3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5" gracePeriod=30 Feb 02 17:11:26 crc kubenswrapper[4835]: E0202 17:11:26.695771 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ddcb06a8206a807be0c793bd460849c4117af25ea7cbd4300c14eecc1ff774a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 17:11:26 crc kubenswrapper[4835]: E0202 17:11:26.697088 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ddcb06a8206a807be0c793bd460849c4117af25ea7cbd4300c14eecc1ff774a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 17:11:26 crc kubenswrapper[4835]: E0202 17:11:26.698716 4835 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ddcb06a8206a807be0c793bd460849c4117af25ea7cbd4300c14eecc1ff774a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 17:11:26 crc kubenswrapper[4835]: E0202 17:11:26.698749 4835 prober.go:104] "Probe errored" 
err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="1fef1efc-7676-4cc9-b903-7df1ad3f819b" containerName="nova-scheduler-scheduler" Feb 02 17:11:26 crc kubenswrapper[4835]: I0202 17:11:26.755138 4835 generic.go:334] "Generic (PLEG): container finished" podID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerID="65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d" exitCode=143 Feb 02 17:11:26 crc kubenswrapper[4835]: I0202 17:11:26.755212 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2530b65e-b20c-47ce-b898-7d272a7080eb","Type":"ContainerDied","Data":"65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d"} Feb 02 17:11:26 crc kubenswrapper[4835]: I0202 17:11:26.757136 4835 generic.go:334] "Generic (PLEG): container finished" podID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerID="d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c" exitCode=143 Feb 02 17:11:26 crc kubenswrapper[4835]: I0202 17:11:26.757166 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b706cf55-d931-4ac1-83a7-37ccf678bceb","Type":"ContainerDied","Data":"d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c"} Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.087658 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.182:8775/\": read tcp 10.217.0.2:42442->10.217.0.182:8775: read: connection reset by peer" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.087727 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.182:8775/\": read tcp 10.217.0.2:42440->10.217.0.182:8775: read: connection reset by peer" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.486876 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.615751 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh6jp\" (UniqueName: \"kubernetes.io/projected/2530b65e-b20c-47ce-b898-7d272a7080eb-kube-api-access-wh6jp\") pod \"2530b65e-b20c-47ce-b898-7d272a7080eb\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.615796 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-config-data\") pod \"2530b65e-b20c-47ce-b898-7d272a7080eb\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.615822 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2530b65e-b20c-47ce-b898-7d272a7080eb-logs\") pod \"2530b65e-b20c-47ce-b898-7d272a7080eb\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.615850 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-nova-metadata-tls-certs\") pod \"2530b65e-b20c-47ce-b898-7d272a7080eb\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.615906 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-combined-ca-bundle\") pod \"2530b65e-b20c-47ce-b898-7d272a7080eb\" (UID: \"2530b65e-b20c-47ce-b898-7d272a7080eb\") " Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.617662 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2530b65e-b20c-47ce-b898-7d272a7080eb-logs" (OuterVolumeSpecName: "logs") pod "2530b65e-b20c-47ce-b898-7d272a7080eb" (UID: "2530b65e-b20c-47ce-b898-7d272a7080eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.622079 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2530b65e-b20c-47ce-b898-7d272a7080eb-kube-api-access-wh6jp" (OuterVolumeSpecName: "kube-api-access-wh6jp") pod "2530b65e-b20c-47ce-b898-7d272a7080eb" (UID: "2530b65e-b20c-47ce-b898-7d272a7080eb"). InnerVolumeSpecName "kube-api-access-wh6jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.647539 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2530b65e-b20c-47ce-b898-7d272a7080eb" (UID: "2530b65e-b20c-47ce-b898-7d272a7080eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.656437 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-config-data" (OuterVolumeSpecName: "config-data") pod "2530b65e-b20c-47ce-b898-7d272a7080eb" (UID: "2530b65e-b20c-47ce-b898-7d272a7080eb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.669725 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "2530b65e-b20c-47ce-b898-7d272a7080eb" (UID: "2530b65e-b20c-47ce-b898-7d272a7080eb"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.717626 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh6jp\" (UniqueName: \"kubernetes.io/projected/2530b65e-b20c-47ce-b898-7d272a7080eb-kube-api-access-wh6jp\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.717814 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.717955 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2530b65e-b20c-47ce-b898-7d272a7080eb-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.718080 4835 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.718192 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2530b65e-b20c-47ce-b898-7d272a7080eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.785037 4835 generic.go:334] "Generic (PLEG): container finished" podID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerID="3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5" exitCode=0 Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.785090 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2530b65e-b20c-47ce-b898-7d272a7080eb","Type":"ContainerDied","Data":"3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5"} Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.785123 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2530b65e-b20c-47ce-b898-7d272a7080eb","Type":"ContainerDied","Data":"5d6e3c3990a3acea7ce042dcd462efc37e86cebcc974d8416c6152efafc5a557"} Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.785144 4835 scope.go:117] "RemoveContainer" containerID="3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.785952 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.826993 4835 scope.go:117] "RemoveContainer" containerID="65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.859313 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.880650 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.889444 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:11:29 crc kubenswrapper[4835]: E0202 17:11:29.889773 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-metadata" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.889786 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-metadata" Feb 02 17:11:29 crc kubenswrapper[4835]: E0202 17:11:29.889808 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="413f9f5d-9076-437b-a5b6-0b7404b81446" containerName="init" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.889815 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="413f9f5d-9076-437b-a5b6-0b7404b81446" containerName="init" Feb 02 17:11:29 crc kubenswrapper[4835]: E0202 17:11:29.889830 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="413f9f5d-9076-437b-a5b6-0b7404b81446" containerName="dnsmasq-dns" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.889838 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="413f9f5d-9076-437b-a5b6-0b7404b81446" containerName="dnsmasq-dns" Feb 02 17:11:29 crc kubenswrapper[4835]: E0202 17:11:29.889852 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b693642-0cee-4d02-b938-4a1fc245e8a0" containerName="nova-manage" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.889858 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b693642-0cee-4d02-b938-4a1fc245e8a0" containerName="nova-manage" Feb 02 17:11:29 crc kubenswrapper[4835]: E0202 17:11:29.889867 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-log" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.889873 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-log" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.890061 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-metadata" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.890088 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="413f9f5d-9076-437b-a5b6-0b7404b81446" containerName="dnsmasq-dns" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.890099 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b693642-0cee-4d02-b938-4a1fc245e8a0" containerName="nova-manage" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.890109 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" containerName="nova-metadata-log" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.890926 4835 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.891004 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.907906 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.907955 4835 scope.go:117] "RemoveContainer" containerID="3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.908127 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 17:11:29 crc kubenswrapper[4835]: E0202 17:11:29.913150 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5\": container with ID starting with 3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5 not found: ID does not exist" containerID="3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.913186 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5"} err="failed to get container status \"3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5\": rpc error: code = NotFound desc = could not find container \"3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5\": container with ID starting with 3a3c81557f4ce7690f5d6cb4d1af0ec1cff665a7e5e0e462e7b28b02402451e5 not found: ID does not exist" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.913212 4835 scope.go:117] "RemoveContainer" containerID="65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d" Feb 02 17:11:29 crc kubenswrapper[4835]: E0202 17:11:29.913454 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d\": container with ID starting with 65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d not found: ID does not exist" containerID="65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.913480 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d"} err="failed to get container status \"65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d\": rpc error: code = NotFound desc = could not find container \"65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d\": container with ID starting with 65d27a6cc27ea296a699a15f2b6daeda42444edbb26b3bc0312aece4b6ea310d not found: ID does not exist" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.930257 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.930349 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b112a741-ef20-4e18-a161-01ed24d9b5da-logs\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.930473 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.930531 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfbsz\" (UniqueName: \"kubernetes.io/projected/b112a741-ef20-4e18-a161-01ed24d9b5da-kube-api-access-jfbsz\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:29 crc kubenswrapper[4835]: I0202 17:11:29.930630 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-config-data\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.032538 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.032587 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfbsz\" (UniqueName: \"kubernetes.io/projected/b112a741-ef20-4e18-a161-01ed24d9b5da-kube-api-access-jfbsz\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.032620 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-config-data\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.032717 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.032743 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b112a741-ef20-4e18-a161-01ed24d9b5da-logs\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.033571 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b112a741-ef20-4e18-a161-01ed24d9b5da-logs\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") 
" pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.036392 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.036516 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-config-data\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.039131 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b112a741-ef20-4e18-a161-01ed24d9b5da-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.050910 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfbsz\" (UniqueName: \"kubernetes.io/projected/b112a741-ef20-4e18-a161-01ed24d9b5da-kube-api-access-jfbsz\") pod \"nova-metadata-0\" (UID: \"b112a741-ef20-4e18-a161-01ed24d9b5da\") " pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.235165 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.687766 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 17:11:30 crc kubenswrapper[4835]: W0202 17:11:30.705627 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb112a741_ef20_4e18_a161_01ed24d9b5da.slice/crio-3cdf8537fb7804fee6d9cc2afc55d2dafa8286981c54757039a1e90607b56246 WatchSource:0}: Error finding container 3cdf8537fb7804fee6d9cc2afc55d2dafa8286981c54757039a1e90607b56246: Status 404 returned error can't find the container with id 3cdf8537fb7804fee6d9cc2afc55d2dafa8286981c54757039a1e90607b56246 Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.792818 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.815522 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b112a741-ef20-4e18-a161-01ed24d9b5da","Type":"ContainerStarted","Data":"3cdf8537fb7804fee6d9cc2afc55d2dafa8286981c54757039a1e90607b56246"} Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.818442 4835 generic.go:334] "Generic (PLEG): container finished" podID="1fef1efc-7676-4cc9-b903-7df1ad3f819b" containerID="7ddcb06a8206a807be0c793bd460849c4117af25ea7cbd4300c14eecc1ff774a" exitCode=0 Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.818517 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1fef1efc-7676-4cc9-b903-7df1ad3f819b","Type":"ContainerDied","Data":"7ddcb06a8206a807be0c793bd460849c4117af25ea7cbd4300c14eecc1ff774a"} Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.824764 4835 generic.go:334] "Generic (PLEG): container finished" podID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerID="58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150" exitCode=0 Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.824815 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b706cf55-d931-4ac1-83a7-37ccf678bceb","Type":"ContainerDied","Data":"58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150"} Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.824847 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b706cf55-d931-4ac1-83a7-37ccf678bceb","Type":"ContainerDied","Data":"9ed628909f8de530aa46b731ce4dc269689a4ef9b979486f391828b1fdc5b2f8"} Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.824867 4835 scope.go:117] "RemoveContainer" containerID="58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.824916 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.856965 4835 scope.go:117] "RemoveContainer" containerID="d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.859997 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-config-data\") pod \"b706cf55-d931-4ac1-83a7-37ccf678bceb\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.860258 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l25df\" (UniqueName: \"kubernetes.io/projected/b706cf55-d931-4ac1-83a7-37ccf678bceb-kube-api-access-l25df\") pod \"b706cf55-d931-4ac1-83a7-37ccf678bceb\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.860716 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-public-tls-certs\") pod \"b706cf55-d931-4ac1-83a7-37ccf678bceb\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.860753 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-internal-tls-certs\") pod \"b706cf55-d931-4ac1-83a7-37ccf678bceb\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.860902 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-combined-ca-bundle\") pod \"b706cf55-d931-4ac1-83a7-37ccf678bceb\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.860961 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b706cf55-d931-4ac1-83a7-37ccf678bceb-logs\") pod \"b706cf55-d931-4ac1-83a7-37ccf678bceb\" (UID: \"b706cf55-d931-4ac1-83a7-37ccf678bceb\") " Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.861628 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b706cf55-d931-4ac1-83a7-37ccf678bceb-logs" (OuterVolumeSpecName: "logs") pod "b706cf55-d931-4ac1-83a7-37ccf678bceb" (UID: "b706cf55-d931-4ac1-83a7-37ccf678bceb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.879383 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b706cf55-d931-4ac1-83a7-37ccf678bceb-kube-api-access-l25df" (OuterVolumeSpecName: "kube-api-access-l25df") pod "b706cf55-d931-4ac1-83a7-37ccf678bceb" (UID: "b706cf55-d931-4ac1-83a7-37ccf678bceb"). InnerVolumeSpecName "kube-api-access-l25df". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.886492 4835 scope.go:117] "RemoveContainer" containerID="58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150" Feb 02 17:11:30 crc kubenswrapper[4835]: E0202 17:11:30.888827 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150\": container with ID starting with 58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150 not found: ID does not exist" containerID="58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.888867 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150"} err="failed to get container status \"58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150\": rpc error: code = NotFound desc = could not find container \"58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150\": container with ID starting with 58386333f36b36a5d47005c31245b3a746678ea41e66a94681cd66c09e838150 not found: ID does not exist" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.888897 4835 scope.go:117] "RemoveContainer" containerID="d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c" Feb 02 17:11:30 crc kubenswrapper[4835]: E0202 17:11:30.889253 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c\": container with ID starting with d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c not found: ID does not exist" containerID="d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.889368 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c"} err="failed to get container status \"d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c\": rpc error: code = NotFound desc = could not find container \"d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c\": container with ID starting with d8cd7e9d6473901d9b0c2500443cb199ec5bc99f0bf9114b132820504e1d898c not found: ID does not exist" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.906022 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-config-data" (OuterVolumeSpecName: "config-data") pod "b706cf55-d931-4ac1-83a7-37ccf678bceb" (UID: "b706cf55-d931-4ac1-83a7-37ccf678bceb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.926366 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b706cf55-d931-4ac1-83a7-37ccf678bceb" (UID: "b706cf55-d931-4ac1-83a7-37ccf678bceb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.932096 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b706cf55-d931-4ac1-83a7-37ccf678bceb" (UID: "b706cf55-d931-4ac1-83a7-37ccf678bceb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.945654 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b706cf55-d931-4ac1-83a7-37ccf678bceb" (UID: "b706cf55-d931-4ac1-83a7-37ccf678bceb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.959047 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.963109 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.963137 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b706cf55-d931-4ac1-83a7-37ccf678bceb-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.963150 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.963159 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l25df\" (UniqueName: \"kubernetes.io/projected/b706cf55-d931-4ac1-83a7-37ccf678bceb-kube-api-access-l25df\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.963169 4835 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:30 crc kubenswrapper[4835]: I0202 17:11:30.963178 4835 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b706cf55-d931-4ac1-83a7-37ccf678bceb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.064428 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-combined-ca-bundle\") pod \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.064684 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-config-data\") pod \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.064741 4835 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-s57wj\" (UniqueName: \"kubernetes.io/projected/1fef1efc-7676-4cc9-b903-7df1ad3f819b-kube-api-access-s57wj\") pod \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\" (UID: \"1fef1efc-7676-4cc9-b903-7df1ad3f819b\") " Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.068820 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fef1efc-7676-4cc9-b903-7df1ad3f819b-kube-api-access-s57wj" (OuterVolumeSpecName: "kube-api-access-s57wj") pod "1fef1efc-7676-4cc9-b903-7df1ad3f819b" (UID: "1fef1efc-7676-4cc9-b903-7df1ad3f819b"). InnerVolumeSpecName "kube-api-access-s57wj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.094827 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1fef1efc-7676-4cc9-b903-7df1ad3f819b" (UID: "1fef1efc-7676-4cc9-b903-7df1ad3f819b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.098903 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-config-data" (OuterVolumeSpecName: "config-data") pod "1fef1efc-7676-4cc9-b903-7df1ad3f819b" (UID: "1fef1efc-7676-4cc9-b903-7df1ad3f819b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.168165 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.168210 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fef1efc-7676-4cc9-b903-7df1ad3f819b-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.168225 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s57wj\" (UniqueName: \"kubernetes.io/projected/1fef1efc-7676-4cc9-b903-7df1ad3f819b-kube-api-access-s57wj\") on node \"crc\" DevicePath \"\"" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.219792 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2530b65e-b20c-47ce-b898-7d272a7080eb" path="/var/lib/kubelet/pods/2530b65e-b20c-47ce-b898-7d272a7080eb/volumes" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.280122 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.291487 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.306160 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:31 crc kubenswrapper[4835]: E0202 17:11:31.306601 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-log" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.306630 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-log" Feb 02 17:11:31 crc kubenswrapper[4835]: E0202 
17:11:31.306644 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fef1efc-7676-4cc9-b903-7df1ad3f819b" containerName="nova-scheduler-scheduler" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.306652 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fef1efc-7676-4cc9-b903-7df1ad3f819b" containerName="nova-scheduler-scheduler" Feb 02 17:11:31 crc kubenswrapper[4835]: E0202 17:11:31.306689 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-api" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.306699 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-api" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.306874 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fef1efc-7676-4cc9-b903-7df1ad3f819b" containerName="nova-scheduler-scheduler" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.306898 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-log" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.306923 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" containerName="nova-api-api" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.307973 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.312991 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.313398 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.313910 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.318741 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.370859 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94lbm\" (UniqueName: \"kubernetes.io/projected/25c6e2e4-4f57-49a6-a558-92106e3f4856-kube-api-access-94lbm\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.370938 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-config-data\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.370968 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.371025 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/25c6e2e4-4f57-49a6-a558-92106e3f4856-logs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.371053 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-internal-tls-certs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.371105 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-public-tls-certs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.473144 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94lbm\" (UniqueName: \"kubernetes.io/projected/25c6e2e4-4f57-49a6-a558-92106e3f4856-kube-api-access-94lbm\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.473530 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-config-data\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.473563 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.473624 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25c6e2e4-4f57-49a6-a558-92106e3f4856-logs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.473654 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-internal-tls-certs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.473713 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-public-tls-certs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.474136 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25c6e2e4-4f57-49a6-a558-92106e3f4856-logs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.478522 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.478651 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-config-data\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.479573 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-public-tls-certs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.484774 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25c6e2e4-4f57-49a6-a558-92106e3f4856-internal-tls-certs\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.491587 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94lbm\" (UniqueName: \"kubernetes.io/projected/25c6e2e4-4f57-49a6-a558-92106e3f4856-kube-api-access-94lbm\") pod \"nova-api-0\" (UID: \"25c6e2e4-4f57-49a6-a558-92106e3f4856\") " pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.628637 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.839789 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b112a741-ef20-4e18-a161-01ed24d9b5da","Type":"ContainerStarted","Data":"6283287c140bb49a6df66ea8e36ad6556c4cd9926c1b0101d472bb125c3ef02f"} Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.840116 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b112a741-ef20-4e18-a161-01ed24d9b5da","Type":"ContainerStarted","Data":"361931ca2e320966be5f0e54fc5dd5f3665354bcbd714a5b8e098be2de1d9778"} Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.844702 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1fef1efc-7676-4cc9-b903-7df1ad3f819b","Type":"ContainerDied","Data":"38323a63666160c00bc9a0eb01170fb4fea428ed26c9435d18c6745ab5b86a43"} Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.844757 4835 scope.go:117] "RemoveContainer" containerID="7ddcb06a8206a807be0c793bd460849c4117af25ea7cbd4300c14eecc1ff774a" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.844804 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.874927 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.874905325 podStartE2EDuration="2.874905325s" podCreationTimestamp="2026-02-02 17:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:11:31.856510004 +0000 UTC m=+1283.478114094" watchObservedRunningTime="2026-02-02 17:11:31.874905325 +0000 UTC m=+1283.496509435" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.894392 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.906330 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.917252 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.918663 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.922604 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.928369 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.982363 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4594b86-bb25-4c6b-922e-ecc018bf4081-config-data\") pod \"nova-scheduler-0\" (UID: \"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.982571 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4594b86-bb25-4c6b-922e-ecc018bf4081-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:31 crc kubenswrapper[4835]: I0202 17:11:31.983002 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8wd6\" (UniqueName: \"kubernetes.io/projected/a4594b86-bb25-4c6b-922e-ecc018bf4081-kube-api-access-n8wd6\") pod \"nova-scheduler-0\" (UID: \"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.060350 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.084886 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8wd6\" (UniqueName: \"kubernetes.io/projected/a4594b86-bb25-4c6b-922e-ecc018bf4081-kube-api-access-n8wd6\") pod \"nova-scheduler-0\" (UID: \"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.085189 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4594b86-bb25-4c6b-922e-ecc018bf4081-config-data\") pod \"nova-scheduler-0\" (UID: 
\"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.085351 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4594b86-bb25-4c6b-922e-ecc018bf4081-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.090984 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4594b86-bb25-4c6b-922e-ecc018bf4081-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.091878 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4594b86-bb25-4c6b-922e-ecc018bf4081-config-data\") pod \"nova-scheduler-0\" (UID: \"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.105775 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8wd6\" (UniqueName: \"kubernetes.io/projected/a4594b86-bb25-4c6b-922e-ecc018bf4081-kube-api-access-n8wd6\") pod \"nova-scheduler-0\" (UID: \"a4594b86-bb25-4c6b-922e-ecc018bf4081\") " pod="openstack/nova-scheduler-0" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.240429 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.667427 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.855807 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25c6e2e4-4f57-49a6-a558-92106e3f4856","Type":"ContainerStarted","Data":"58cf3eecabb5474c46061df984557764fb11441ac3d1a3a0447495dee425f985"} Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.855874 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25c6e2e4-4f57-49a6-a558-92106e3f4856","Type":"ContainerStarted","Data":"ce6fe189bc4002903d4b402c9cbe67c7567db5301a7b566a2d0dbc99e6d7102f"} Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.855890 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25c6e2e4-4f57-49a6-a558-92106e3f4856","Type":"ContainerStarted","Data":"c80d1d8ff94fd7215d6100b405b8214fbf8df439bb1a8919e091c91907df983d"} Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.857934 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a4594b86-bb25-4c6b-922e-ecc018bf4081","Type":"ContainerStarted","Data":"7a64879380ae663cad9759d5c75227a2f25d821b91276b13cb2f6074a3ab63f8"} Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.857979 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a4594b86-bb25-4c6b-922e-ecc018bf4081","Type":"ContainerStarted","Data":"09674f83047721e541b0560945724805ccdb8904c112ec99bc4bfc2884af4cff"} Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.880934 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.8809163359999999 
podStartE2EDuration="1.880916336s" podCreationTimestamp="2026-02-02 17:11:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:11:32.874860104 +0000 UTC m=+1284.496464194" watchObservedRunningTime="2026-02-02 17:11:32.880916336 +0000 UTC m=+1284.502520416" Feb 02 17:11:32 crc kubenswrapper[4835]: I0202 17:11:32.892616 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.892594506 podStartE2EDuration="1.892594506s" podCreationTimestamp="2026-02-02 17:11:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:11:32.888373627 +0000 UTC m=+1284.509977717" watchObservedRunningTime="2026-02-02 17:11:32.892594506 +0000 UTC m=+1284.514198596" Feb 02 17:11:33 crc kubenswrapper[4835]: I0202 17:11:33.206333 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fef1efc-7676-4cc9-b903-7df1ad3f819b" path="/var/lib/kubelet/pods/1fef1efc-7676-4cc9-b903-7df1ad3f819b/volumes" Feb 02 17:11:33 crc kubenswrapper[4835]: I0202 17:11:33.207219 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b706cf55-d931-4ac1-83a7-37ccf678bceb" path="/var/lib/kubelet/pods/b706cf55-d931-4ac1-83a7-37ccf678bceb/volumes" Feb 02 17:11:35 crc kubenswrapper[4835]: I0202 17:11:35.235894 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 17:11:35 crc kubenswrapper[4835]: I0202 17:11:35.236232 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 17:11:37 crc kubenswrapper[4835]: I0202 17:11:37.240572 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 17:11:40 crc kubenswrapper[4835]: I0202 17:11:40.235981 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 17:11:40 crc kubenswrapper[4835]: I0202 17:11:40.236325 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 17:11:41 crc kubenswrapper[4835]: I0202 17:11:41.247512 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b112a741-ef20-4e18-a161-01ed24d9b5da" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 17:11:41 crc kubenswrapper[4835]: I0202 17:11:41.247515 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b112a741-ef20-4e18-a161-01ed24d9b5da" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 17:11:41 crc kubenswrapper[4835]: I0202 17:11:41.629142 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 17:11:41 crc kubenswrapper[4835]: I0202 17:11:41.629488 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 17:11:42 crc kubenswrapper[4835]: I0202 17:11:42.241169 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 17:11:42 crc kubenswrapper[4835]: I0202 
17:11:42.273886 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 17:11:42 crc kubenswrapper[4835]: I0202 17:11:42.641428 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="25c6e2e4-4f57-49a6-a558-92106e3f4856" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.194:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 17:11:42 crc kubenswrapper[4835]: I0202 17:11:42.641439 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="25c6e2e4-4f57-49a6-a558-92106e3f4856" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.194:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 17:11:42 crc kubenswrapper[4835]: I0202 17:11:42.977806 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 17:11:45 crc kubenswrapper[4835]: I0202 17:11:45.021720 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 17:11:50 crc kubenswrapper[4835]: I0202 17:11:50.240917 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 17:11:50 crc kubenswrapper[4835]: I0202 17:11:50.243048 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 17:11:50 crc kubenswrapper[4835]: I0202 17:11:50.246652 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 17:11:51 crc kubenswrapper[4835]: I0202 17:11:51.041726 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 17:11:51 crc kubenswrapper[4835]: I0202 17:11:51.640487 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 17:11:51 crc kubenswrapper[4835]: I0202 17:11:51.640981 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 17:11:51 crc kubenswrapper[4835]: I0202 17:11:51.641694 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 17:11:51 crc kubenswrapper[4835]: I0202 17:11:51.647434 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 17:11:52 crc kubenswrapper[4835]: I0202 17:11:52.046087 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 17:11:52 crc kubenswrapper[4835]: I0202 17:11:52.051299 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 17:11:59 crc kubenswrapper[4835]: I0202 17:11:59.478425 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:12:00 crc kubenswrapper[4835]: I0202 17:12:00.389045 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:12:04 crc kubenswrapper[4835]: I0202 17:12:04.041045 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerName="rabbitmq" containerID="cri-o://c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec" gracePeriod=604796 Feb 02 17:12:04 crc kubenswrapper[4835]: I0202 
17:12:04.487440 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerName="rabbitmq" containerID="cri-o://17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c" gracePeriod=604796 Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.362936 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.714051 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.780793 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.907974 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bnzz\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-kube-api-access-7bnzz\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908109 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-server-conf\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908192 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5cb4e8f7-3881-4fef-9056-0e2f149aab21-erlang-cookie-secret\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908215 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-tls\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908332 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-erlang-cookie\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908386 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-confd\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908480 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-config-data\") pod 
\"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908529 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5cb4e8f7-3881-4fef-9056-0e2f149aab21-pod-info\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908557 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908651 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-plugins-conf\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.908718 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-plugins\") pod \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\" (UID: \"5cb4e8f7-3881-4fef-9056-0e2f149aab21\") " Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.912263 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.915237 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.915413 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.918571 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-kube-api-access-7bnzz" (OuterVolumeSpecName: "kube-api-access-7bnzz") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "kube-api-access-7bnzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.919066 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.920852 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.929486 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5cb4e8f7-3881-4fef-9056-0e2f149aab21-pod-info" (OuterVolumeSpecName: "pod-info") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.941871 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb4e8f7-3881-4fef-9056-0e2f149aab21-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:12:10 crc kubenswrapper[4835]: I0202 17:12:10.987730 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-config-data" (OuterVolumeSpecName: "config-data") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012722 4835 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5cb4e8f7-3881-4fef-9056-0e2f149aab21-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012761 4835 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012770 4835 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012784 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012795 4835 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5cb4e8f7-3881-4fef-9056-0e2f149aab21-pod-info\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012822 4835 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012832 4835 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012842 4835 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.012856 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bnzz\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-kube-api-access-7bnzz\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.025745 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-server-conf" (OuterVolumeSpecName: "server-conf") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.049104 4835 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.116144 4835 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.116192 4835 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5cb4e8f7-3881-4fef-9056-0e2f149aab21-server-conf\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.128657 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5cb4e8f7-3881-4fef-9056-0e2f149aab21" (UID: "5cb4e8f7-3881-4fef-9056-0e2f149aab21"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.140135 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.217436 4835 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5cb4e8f7-3881-4fef-9056-0e2f149aab21-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.226593 4835 generic.go:334] "Generic (PLEG): container finished" podID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerID="17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c" exitCode=0 Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.226710 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ddf6049-a0d8-429c-b8ce-b52702f4ee60","Type":"ContainerDied","Data":"17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c"} Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.226743 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8ddf6049-a0d8-429c-b8ce-b52702f4ee60","Type":"ContainerDied","Data":"251360b863ebf09072834186269e1cb759543038e161d94f3103b1309d124262"} Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.226765 4835 scope.go:117] "RemoveContainer" containerID="17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.226935 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.232240 4835 generic.go:334] "Generic (PLEG): container finished" podID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerID="c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec" exitCode=0 Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.232527 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5cb4e8f7-3881-4fef-9056-0e2f149aab21","Type":"ContainerDied","Data":"c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec"} Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.232556 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5cb4e8f7-3881-4fef-9056-0e2f149aab21","Type":"ContainerDied","Data":"d7c52681deda19177201596fffd71b2de67fbc321c2ace1d46a817b6a21bb383"} Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.232635 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.274427 4835 scope.go:117] "RemoveContainer" containerID="d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.310353 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.318683 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-config-data\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.318735 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-server-conf\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.318767 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-pod-info\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.318836 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slbzw\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-kube-api-access-slbzw\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.318907 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-plugins-conf\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.318953 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-tls\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 
17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.319030 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-plugins\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.319071 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-erlang-cookie\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.319095 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-confd\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.319128 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-erlang-cookie-secret\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.319169 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\" (UID: \"8ddf6049-a0d8-429c-b8ce-b52702f4ee60\") " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.334196 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.334444 4835 scope.go:117] "RemoveContainer" containerID="17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.335266 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: E0202 17:12:11.339862 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c\": container with ID starting with 17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c not found: ID does not exist" containerID="17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.339931 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c"} err="failed to get container status \"17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c\": rpc error: code = NotFound desc = could not find container \"17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c\": container with ID starting with 17e74e5edbc18188f109c57b8222e28c6d43bb5a23e699f25cb77702f0b5179c not found: ID does not exist" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.339978 4835 scope.go:117] "RemoveContainer" containerID="d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7" Feb 02 17:12:11 crc kubenswrapper[4835]: E0202 17:12:11.341659 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7\": container with ID starting with d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7 not found: ID does not exist" containerID="d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.341786 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7"} err="failed to get container status \"d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7\": rpc error: code = NotFound desc = could not find container \"d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7\": container with ID starting with d3f160d2343ba070a9a7959428fb32c0d3e6fca0fe7151ee2d96694ac06e46c7 not found: ID does not exist" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.341837 4835 scope.go:117] "RemoveContainer" containerID="c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.343547 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.345475 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-kube-api-access-slbzw" (OuterVolumeSpecName: "kube-api-access-slbzw") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "kube-api-access-slbzw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.349036 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.356743 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.365692 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.379459 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-pod-info" (OuterVolumeSpecName: "pod-info") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.390809 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-config-data" (OuterVolumeSpecName: "config-data") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.395943 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:12:11 crc kubenswrapper[4835]: E0202 17:12:11.401097 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerName="setup-container" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.401156 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerName="setup-container" Feb 02 17:12:11 crc kubenswrapper[4835]: E0202 17:12:11.401175 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerName="rabbitmq" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.401181 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerName="rabbitmq" Feb 02 17:12:11 crc kubenswrapper[4835]: E0202 17:12:11.401217 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerName="rabbitmq" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.401224 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerName="rabbitmq" Feb 02 17:12:11 crc kubenswrapper[4835]: E0202 17:12:11.401236 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerName="setup-container" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.401242 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerName="setup-container" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.402016 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" containerName="rabbitmq" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.402042 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" containerName="rabbitmq" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.403529 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.406353 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.407027 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jm6m2" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.407212 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.407359 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.407386 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.407403 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.408655 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.409608 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421308 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421865 4835 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-pod-info\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421896 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slbzw\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-kube-api-access-slbzw\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421906 4835 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421914 4835 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421922 4835 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421932 4835 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421940 4835 
reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421974 4835 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.421984 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.452724 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-server-conf" (OuterVolumeSpecName: "server-conf") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.459132 4835 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.529009 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8ddf6049-a0d8-429c-b8ce-b52702f4ee60" (UID: "8ddf6049-a0d8-429c-b8ce-b52702f4ee60"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.537720 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.537774 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.537898 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-config-data\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.537928 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.537989 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538032 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538064 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538153 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538430 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538486 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538574 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhxkh\" (UniqueName: \"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-kube-api-access-dhxkh\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538748 4835 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538771 4835 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-server-conf\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.538786 4835 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8ddf6049-a0d8-429c-b8ce-b52702f4ee60-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.539997 4835 scope.go:117] "RemoveContainer" containerID="62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.558986 4835 
scope.go:117] "RemoveContainer" containerID="c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec" Feb 02 17:12:11 crc kubenswrapper[4835]: E0202 17:12:11.559490 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec\": container with ID starting with c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec not found: ID does not exist" containerID="c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.559544 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec"} err="failed to get container status \"c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec\": rpc error: code = NotFound desc = could not find container \"c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec\": container with ID starting with c60956cfa0850d622f3b207332d124e838efa5123aed3a7ab10180946bd52fec not found: ID does not exist" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.559581 4835 scope.go:117] "RemoveContainer" containerID="62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4" Feb 02 17:12:11 crc kubenswrapper[4835]: E0202 17:12:11.560019 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4\": container with ID starting with 62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4 not found: ID does not exist" containerID="62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.560046 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4"} err="failed to get container status \"62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4\": rpc error: code = NotFound desc = could not find container \"62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4\": container with ID starting with 62fb2c64ae9f100b0028d7f827fa51c5d8d89e989b636d29c9baef5754c50cd4 not found: ID does not exist" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.635174 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-4r9z6"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.638209 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.639770 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhxkh\" (UniqueName: \"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-kube-api-access-dhxkh\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.639836 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.639854 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.639899 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-config-data\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.639919 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.639942 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.639964 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.639981 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.640014 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.640041 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.640057 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.640452 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.640532 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.641398 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.641534 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-config-data\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.641667 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.642710 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.644768 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.647589 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.649215 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: 
\"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.649583 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.649894 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.677895 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-4r9z6"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.685242 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhxkh\" (UniqueName: \"kubernetes.io/projected/cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450-kube-api-access-dhxkh\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.714554 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450\") " pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.741977 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m42j4\" (UniqueName: \"kubernetes.io/projected/857e5478-8cd3-42e3-98f9-b63358eab982-kube-api-access-m42j4\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.742023 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.742060 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-dns-svc\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.742103 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.742263 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.742605 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-config\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.844003 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-config\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.844360 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m42j4\" (UniqueName: \"kubernetes.io/projected/857e5478-8cd3-42e3-98f9-b63358eab982-kube-api-access-m42j4\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.844392 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.844415 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-dns-svc\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.844463 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.844520 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.845409 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.845970 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-config\") pod 
\"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.846591 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.846633 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-dns-svc\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.847219 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.847230 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.864491 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m42j4\" (UniqueName: \"kubernetes.io/projected/857e5478-8cd3-42e3-98f9-b63358eab982-kube-api-access-m42j4\") pod \"dnsmasq-dns-578b8d767c-4r9z6\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.865900 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.878042 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.890454 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.896602 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.900534 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.900760 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.901927 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.902331 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.902487 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.903399 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.903562 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mjbch" Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.904592 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:12:11 crc kubenswrapper[4835]: I0202 17:12:11.962374 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.047665 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.047728 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.047758 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.047777 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhchp\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-kube-api-access-nhchp\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.047877 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.047933 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.047962 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.048000 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.048023 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.048060 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.048100 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149452 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149513 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149556 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149578 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149618 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149657 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149707 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149735 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149760 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149781 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhchp\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-kube-api-access-nhchp\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.149874 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.150531 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.150909 4835 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.151389 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.152601 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.153263 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.153484 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.156267 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.158847 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.159129 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.164033 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.173170 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhchp\" (UniqueName: \"kubernetes.io/projected/e7014da0-d4d6-4279-9f39-e50a4bbcdda5-kube-api-access-nhchp\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.189175 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e7014da0-d4d6-4279-9f39-e50a4bbcdda5\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.217579 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.379322 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 17:12:12 crc kubenswrapper[4835]: W0202 17:12:12.380087 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf1f6e5c_fe3f_4be0_9931_dd3aa5a61450.slice/crio-c18805c371525f9e163d7f48b094a785b55d05868e5cc4498b3cffdccca3a1da WatchSource:0}: Error finding container c18805c371525f9e163d7f48b094a785b55d05868e5cc4498b3cffdccca3a1da: Status 404 returned error can't find the container with id c18805c371525f9e163d7f48b094a785b55d05868e5cc4498b3cffdccca3a1da Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.514722 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-4r9z6"] Feb 02 17:12:12 crc kubenswrapper[4835]: W0202 17:12:12.516594 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod857e5478_8cd3_42e3_98f9_b63358eab982.slice/crio-8e2197ce0fb25f4b3eb4443fbe92205282e4a1097054194dc47515f19eec2811 WatchSource:0}: Error finding container 8e2197ce0fb25f4b3eb4443fbe92205282e4a1097054194dc47515f19eec2811: Status 404 returned error can't find the container with id 8e2197ce0fb25f4b3eb4443fbe92205282e4a1097054194dc47515f19eec2811 Feb 02 17:12:12 crc kubenswrapper[4835]: W0202 17:12:12.664751 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7014da0_d4d6_4279_9f39_e50a4bbcdda5.slice/crio-cc302afb345cc5fb338546da9fa4a487e3cd3e6162935404a59b923e91504f26 WatchSource:0}: Error finding container cc302afb345cc5fb338546da9fa4a487e3cd3e6162935404a59b923e91504f26: Status 404 returned error can't find the container with id cc302afb345cc5fb338546da9fa4a487e3cd3e6162935404a59b923e91504f26 Feb 02 17:12:12 crc kubenswrapper[4835]: I0202 17:12:12.665748 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 17:12:13 crc kubenswrapper[4835]: I0202 17:12:13.203490 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cb4e8f7-3881-4fef-9056-0e2f149aab21" path="/var/lib/kubelet/pods/5cb4e8f7-3881-4fef-9056-0e2f149aab21/volumes" Feb 02 17:12:13 crc kubenswrapper[4835]: I0202 17:12:13.205012 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ddf6049-a0d8-429c-b8ce-b52702f4ee60" path="/var/lib/kubelet/pods/8ddf6049-a0d8-429c-b8ce-b52702f4ee60/volumes" Feb 02 17:12:13 crc kubenswrapper[4835]: I0202 17:12:13.262316 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7014da0-d4d6-4279-9f39-e50a4bbcdda5","Type":"ContainerStarted","Data":"cc302afb345cc5fb338546da9fa4a487e3cd3e6162935404a59b923e91504f26"} Feb 
02 17:12:13 crc kubenswrapper[4835]: I0202 17:12:13.264586 4835 generic.go:334] "Generic (PLEG): container finished" podID="857e5478-8cd3-42e3-98f9-b63358eab982" containerID="5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e" exitCode=0 Feb 02 17:12:13 crc kubenswrapper[4835]: I0202 17:12:13.264627 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" event={"ID":"857e5478-8cd3-42e3-98f9-b63358eab982","Type":"ContainerDied","Data":"5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e"} Feb 02 17:12:13 crc kubenswrapper[4835]: I0202 17:12:13.265704 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" event={"ID":"857e5478-8cd3-42e3-98f9-b63358eab982","Type":"ContainerStarted","Data":"8e2197ce0fb25f4b3eb4443fbe92205282e4a1097054194dc47515f19eec2811"} Feb 02 17:12:13 crc kubenswrapper[4835]: I0202 17:12:13.268217 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450","Type":"ContainerStarted","Data":"c18805c371525f9e163d7f48b094a785b55d05868e5cc4498b3cffdccca3a1da"} Feb 02 17:12:14 crc kubenswrapper[4835]: I0202 17:12:14.282189 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450","Type":"ContainerStarted","Data":"d2c0724e43916c7c2310484540174043846d252094bb2fddfac0af98e91c1e6f"} Feb 02 17:12:14 crc kubenswrapper[4835]: I0202 17:12:14.284925 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7014da0-d4d6-4279-9f39-e50a4bbcdda5","Type":"ContainerStarted","Data":"c28489731f535c66da049613440e4462e865b7341f7f430fbbf7e0889558710b"} Feb 02 17:12:14 crc kubenswrapper[4835]: I0202 17:12:14.297743 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" event={"ID":"857e5478-8cd3-42e3-98f9-b63358eab982","Type":"ContainerStarted","Data":"368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239"} Feb 02 17:12:14 crc kubenswrapper[4835]: I0202 17:12:14.298608 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:14 crc kubenswrapper[4835]: I0202 17:12:14.352439 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" podStartSLOduration=3.352421291 podStartE2EDuration="3.352421291s" podCreationTimestamp="2026-02-02 17:12:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:12:14.344337912 +0000 UTC m=+1325.965942012" watchObservedRunningTime="2026-02-02 17:12:14.352421291 +0000 UTC m=+1325.974025371" Feb 02 17:12:21 crc kubenswrapper[4835]: I0202 17:12:21.964442 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.022669 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-k4zxs"] Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.022956 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" podUID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerName="dnsmasq-dns" containerID="cri-o://c5f663722c1dc0dc104ee1222f1650be13c2c806f554ad72864793c60c7b8159" 
gracePeriod=10 Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.171893 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-hwd6p"] Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.175223 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.191948 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-hwd6p"] Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.244512 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-nb\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.244612 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlzrt\" (UniqueName: \"kubernetes.io/projected/38d5997d-17a2-4379-bf47-ff2ef2705e77-kube-api-access-jlzrt\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.244693 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-sb\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.244804 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-openstack-edpm-ipam\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.244841 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-dns-svc\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.245052 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-config\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.259993 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" podUID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.189:5353: connect: connection refused" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.347009 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-openstack-edpm-ipam\") pod 
\"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.347087 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-dns-svc\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.347115 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-config\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.347185 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-nb\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.347228 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlzrt\" (UniqueName: \"kubernetes.io/projected/38d5997d-17a2-4379-bf47-ff2ef2705e77-kube-api-access-jlzrt\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.347251 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-sb\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.348133 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-openstack-edpm-ipam\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.348281 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-sb\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.348260 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-dns-svc\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.348516 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-config\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 
17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.348697 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-nb\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.365737 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlzrt\" (UniqueName: \"kubernetes.io/projected/38d5997d-17a2-4379-bf47-ff2ef2705e77-kube-api-access-jlzrt\") pod \"dnsmasq-dns-fbc59fbb7-hwd6p\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.387377 4835 generic.go:334] "Generic (PLEG): container finished" podID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerID="c5f663722c1dc0dc104ee1222f1650be13c2c806f554ad72864793c60c7b8159" exitCode=0 Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.387750 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" event={"ID":"95e4982e-b72d-4b5e-9c32-cbda57e9d23b","Type":"ContainerDied","Data":"c5f663722c1dc0dc104ee1222f1650be13c2c806f554ad72864793c60c7b8159"} Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.544484 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.618124 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.752685 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-config\") pod \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.752737 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-nb\") pod \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.752788 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56rp6\" (UniqueName: \"kubernetes.io/projected/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-kube-api-access-56rp6\") pod \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.752838 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-dns-svc\") pod \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.752959 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-sb\") pod \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\" (UID: \"95e4982e-b72d-4b5e-9c32-cbda57e9d23b\") " Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.758429 4835 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-kube-api-access-56rp6" (OuterVolumeSpecName: "kube-api-access-56rp6") pod "95e4982e-b72d-4b5e-9c32-cbda57e9d23b" (UID: "95e4982e-b72d-4b5e-9c32-cbda57e9d23b"). InnerVolumeSpecName "kube-api-access-56rp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.801244 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-config" (OuterVolumeSpecName: "config") pod "95e4982e-b72d-4b5e-9c32-cbda57e9d23b" (UID: "95e4982e-b72d-4b5e-9c32-cbda57e9d23b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.802467 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "95e4982e-b72d-4b5e-9c32-cbda57e9d23b" (UID: "95e4982e-b72d-4b5e-9c32-cbda57e9d23b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.802827 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "95e4982e-b72d-4b5e-9c32-cbda57e9d23b" (UID: "95e4982e-b72d-4b5e-9c32-cbda57e9d23b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.808888 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "95e4982e-b72d-4b5e-9c32-cbda57e9d23b" (UID: "95e4982e-b72d-4b5e-9c32-cbda57e9d23b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.855031 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.855061 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.855140 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56rp6\" (UniqueName: \"kubernetes.io/projected/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-kube-api-access-56rp6\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.855154 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:22 crc kubenswrapper[4835]: I0202 17:12:22.855164 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95e4982e-b72d-4b5e-9c32-cbda57e9d23b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.001386 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-hwd6p"] Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.396993 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.396993 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-k4zxs" event={"ID":"95e4982e-b72d-4b5e-9c32-cbda57e9d23b","Type":"ContainerDied","Data":"999fb1c4d55e8be2f36e97293d232e30de81f5ea4eae768ef8d4c00ee95eb6ca"} Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.397452 4835 scope.go:117] "RemoveContainer" containerID="c5f663722c1dc0dc104ee1222f1650be13c2c806f554ad72864793c60c7b8159" Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.399074 4835 generic.go:334] "Generic (PLEG): container finished" podID="38d5997d-17a2-4379-bf47-ff2ef2705e77" containerID="759ffb18a3684d96dc3a839088c978da4f2f17a9a00c0bc11ab37f5c638145a2" exitCode=0 Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.399115 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" event={"ID":"38d5997d-17a2-4379-bf47-ff2ef2705e77","Type":"ContainerDied","Data":"759ffb18a3684d96dc3a839088c978da4f2f17a9a00c0bc11ab37f5c638145a2"} Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.399162 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" event={"ID":"38d5997d-17a2-4379-bf47-ff2ef2705e77","Type":"ContainerStarted","Data":"185be7cc9129ce62f1b698a346a5425d2ed776f32a26018009bce11ff9686e80"} Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.432831 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-k4zxs"] Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.436330 4835 scope.go:117] "RemoveContainer" containerID="83ba2b8c03ea8252292c077d3b16456a34e3f5aed4e984d9e9b3e75ae79c4a10" Feb 02 17:12:23 crc kubenswrapper[4835]: I0202 17:12:23.446215 4835 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-k4zxs"] Feb 02 17:12:24 crc kubenswrapper[4835]: I0202 17:12:24.409718 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" event={"ID":"38d5997d-17a2-4379-bf47-ff2ef2705e77","Type":"ContainerStarted","Data":"955fdfdfd066e707901defaf326fb2def2d45f6b6c33529f1b8b5edc5cf0a938"} Feb 02 17:12:24 crc kubenswrapper[4835]: I0202 17:12:24.410181 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:24 crc kubenswrapper[4835]: I0202 17:12:24.432119 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" podStartSLOduration=2.43209965 podStartE2EDuration="2.43209965s" podCreationTimestamp="2026-02-02 17:12:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:12:24.430790743 +0000 UTC m=+1336.052394833" watchObservedRunningTime="2026-02-02 17:12:24.43209965 +0000 UTC m=+1336.053703730" Feb 02 17:12:25 crc kubenswrapper[4835]: I0202 17:12:25.198477 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" path="/var/lib/kubelet/pods/95e4982e-b72d-4b5e-9c32-cbda57e9d23b/volumes" Feb 02 17:12:32 crc kubenswrapper[4835]: I0202 17:12:32.546306 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:12:32 crc kubenswrapper[4835]: I0202 17:12:32.620888 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-4r9z6"] Feb 02 17:12:32 crc kubenswrapper[4835]: I0202 17:12:32.621124 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" podUID="857e5478-8cd3-42e3-98f9-b63358eab982" containerName="dnsmasq-dns" containerID="cri-o://368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239" gracePeriod=10 Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.104141 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.235944 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-dns-svc\") pod \"857e5478-8cd3-42e3-98f9-b63358eab982\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.236060 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-sb\") pod \"857e5478-8cd3-42e3-98f9-b63358eab982\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.236092 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-nb\") pod \"857e5478-8cd3-42e3-98f9-b63358eab982\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.236166 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-openstack-edpm-ipam\") pod \"857e5478-8cd3-42e3-98f9-b63358eab982\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.236214 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m42j4\" (UniqueName: \"kubernetes.io/projected/857e5478-8cd3-42e3-98f9-b63358eab982-kube-api-access-m42j4\") pod \"857e5478-8cd3-42e3-98f9-b63358eab982\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.236313 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-config\") pod \"857e5478-8cd3-42e3-98f9-b63358eab982\" (UID: \"857e5478-8cd3-42e3-98f9-b63358eab982\") " Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.285833 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/857e5478-8cd3-42e3-98f9-b63358eab982-kube-api-access-m42j4" (OuterVolumeSpecName: "kube-api-access-m42j4") pod "857e5478-8cd3-42e3-98f9-b63358eab982" (UID: "857e5478-8cd3-42e3-98f9-b63358eab982"). InnerVolumeSpecName "kube-api-access-m42j4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.306983 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "857e5478-8cd3-42e3-98f9-b63358eab982" (UID: "857e5478-8cd3-42e3-98f9-b63358eab982"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.309631 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "857e5478-8cd3-42e3-98f9-b63358eab982" (UID: "857e5478-8cd3-42e3-98f9-b63358eab982"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.320817 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "857e5478-8cd3-42e3-98f9-b63358eab982" (UID: "857e5478-8cd3-42e3-98f9-b63358eab982"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.331200 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-config" (OuterVolumeSpecName: "config") pod "857e5478-8cd3-42e3-98f9-b63358eab982" (UID: "857e5478-8cd3-42e3-98f9-b63358eab982"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.338865 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.338893 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.338904 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.338913 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m42j4\" (UniqueName: \"kubernetes.io/projected/857e5478-8cd3-42e3-98f9-b63358eab982-kube-api-access-m42j4\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.338922 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.356942 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "857e5478-8cd3-42e3-98f9-b63358eab982" (UID: "857e5478-8cd3-42e3-98f9-b63358eab982"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.440662 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/857e5478-8cd3-42e3-98f9-b63358eab982-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.490441 4835 generic.go:334] "Generic (PLEG): container finished" podID="857e5478-8cd3-42e3-98f9-b63358eab982" containerID="368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239" exitCode=0 Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.490490 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" event={"ID":"857e5478-8cd3-42e3-98f9-b63358eab982","Type":"ContainerDied","Data":"368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239"} Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.490527 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.490545 4835 scope.go:117] "RemoveContainer" containerID="368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.490529 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-4r9z6" event={"ID":"857e5478-8cd3-42e3-98f9-b63358eab982","Type":"ContainerDied","Data":"8e2197ce0fb25f4b3eb4443fbe92205282e4a1097054194dc47515f19eec2811"} Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.512787 4835 scope.go:117] "RemoveContainer" containerID="5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.547201 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-4r9z6"] Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.548519 4835 scope.go:117] "RemoveContainer" containerID="368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239" Feb 02 17:12:33 crc kubenswrapper[4835]: E0202 17:12:33.549721 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239\": container with ID starting with 368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239 not found: ID does not exist" containerID="368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.549759 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239"} err="failed to get container status \"368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239\": rpc error: code = NotFound desc = could not find container \"368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239\": container with ID starting with 368525bd435bcb2c3d570c15ff087cc94e0bec5d2023e15c05ae8259a57ed239 not found: ID does not exist" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.549789 4835 scope.go:117] "RemoveContainer" containerID="5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e" Feb 02 17:12:33 crc kubenswrapper[4835]: E0202 17:12:33.550193 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e\": container with ID starting with 5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e not found: ID does not exist" containerID="5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.550219 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e"} err="failed to get container status \"5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e\": rpc error: code = NotFound desc = could not find container \"5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e\": container with ID starting with 5eb674ad5ddf572da793b634be22b96a98307448b14195794b2c4395f060d45e not found: ID does not exist" Feb 02 17:12:33 crc kubenswrapper[4835]: I0202 17:12:33.555851 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-4r9z6"] Feb 02 17:12:35 crc kubenswrapper[4835]: I0202 17:12:35.200473 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="857e5478-8cd3-42e3-98f9-b63358eab982" path="/var/lib/kubelet/pods/857e5478-8cd3-42e3-98f9-b63358eab982/volumes" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.433863 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9"] Feb 02 17:12:38 crc kubenswrapper[4835]: E0202 17:12:38.435731 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerName="dnsmasq-dns" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.435857 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerName="dnsmasq-dns" Feb 02 17:12:38 crc kubenswrapper[4835]: E0202 17:12:38.435943 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857e5478-8cd3-42e3-98f9-b63358eab982" containerName="init" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.436023 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="857e5478-8cd3-42e3-98f9-b63358eab982" containerName="init" Feb 02 17:12:38 crc kubenswrapper[4835]: E0202 17:12:38.436108 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerName="init" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.436176 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerName="init" Feb 02 17:12:38 crc kubenswrapper[4835]: E0202 17:12:38.436252 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857e5478-8cd3-42e3-98f9-b63358eab982" containerName="dnsmasq-dns" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.436335 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="857e5478-8cd3-42e3-98f9-b63358eab982" containerName="dnsmasq-dns" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.436607 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="857e5478-8cd3-42e3-98f9-b63358eab982" containerName="dnsmasq-dns" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.436739 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="95e4982e-b72d-4b5e-9c32-cbda57e9d23b" containerName="dnsmasq-dns" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.437564 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.440053 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.440061 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.440204 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.440492 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.443724 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9"] Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.524942 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv5d7\" (UniqueName: \"kubernetes.io/projected/12e6900a-36b2-4110-8f06-d37236112c63-kube-api-access-fv5d7\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.525199 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.525307 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.525356 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.626841 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv5d7\" (UniqueName: \"kubernetes.io/projected/12e6900a-36b2-4110-8f06-d37236112c63-kube-api-access-fv5d7\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.627407 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.627690 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.627999 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.632962 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.633206 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.635939 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.642756 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv5d7\" (UniqueName: \"kubernetes.io/projected/12e6900a-36b2-4110-8f06-d37236112c63-kube-api-access-fv5d7\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:38 crc kubenswrapper[4835]: I0202 17:12:38.761190 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:12:39 crc kubenswrapper[4835]: I0202 17:12:39.295576 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9"] Feb 02 17:12:39 crc kubenswrapper[4835]: W0202 17:12:39.299985 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12e6900a_36b2_4110_8f06_d37236112c63.slice/crio-3561f8fb820a00571bdde393f6785ef7c7532060e64b1b21e0f6bb2c5c7fa4a6 WatchSource:0}: Error finding container 3561f8fb820a00571bdde393f6785ef7c7532060e64b1b21e0f6bb2c5c7fa4a6: Status 404 returned error can't find the container with id 3561f8fb820a00571bdde393f6785ef7c7532060e64b1b21e0f6bb2c5c7fa4a6 Feb 02 17:12:39 crc kubenswrapper[4835]: I0202 17:12:39.305251 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:12:39 crc kubenswrapper[4835]: I0202 17:12:39.542585 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" event={"ID":"12e6900a-36b2-4110-8f06-d37236112c63","Type":"ContainerStarted","Data":"3561f8fb820a00571bdde393f6785ef7c7532060e64b1b21e0f6bb2c5c7fa4a6"} Feb 02 17:12:46 crc kubenswrapper[4835]: I0202 17:12:46.605349 4835 generic.go:334] "Generic (PLEG): container finished" podID="cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450" containerID="d2c0724e43916c7c2310484540174043846d252094bb2fddfac0af98e91c1e6f" exitCode=0 Feb 02 17:12:46 crc kubenswrapper[4835]: I0202 17:12:46.606133 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450","Type":"ContainerDied","Data":"d2c0724e43916c7c2310484540174043846d252094bb2fddfac0af98e91c1e6f"} Feb 02 17:12:46 crc kubenswrapper[4835]: I0202 17:12:46.613384 4835 generic.go:334] "Generic (PLEG): container finished" podID="e7014da0-d4d6-4279-9f39-e50a4bbcdda5" containerID="c28489731f535c66da049613440e4462e865b7341f7f430fbbf7e0889558710b" exitCode=0 Feb 02 17:12:46 crc kubenswrapper[4835]: I0202 17:12:46.613430 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7014da0-d4d6-4279-9f39-e50a4bbcdda5","Type":"ContainerDied","Data":"c28489731f535c66da049613440e4462e865b7341f7f430fbbf7e0889558710b"} Feb 02 17:12:48 crc kubenswrapper[4835]: I0202 17:12:48.634434 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" event={"ID":"12e6900a-36b2-4110-8f06-d37236112c63","Type":"ContainerStarted","Data":"bbd2052111b641a4ab901e87a92e9b75841bb197c69d06f41fd37dffb7812984"} Feb 02 17:12:48 crc kubenswrapper[4835]: I0202 17:12:48.638384 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e7014da0-d4d6-4279-9f39-e50a4bbcdda5","Type":"ContainerStarted","Data":"4d6e241739d44fd4cdb5539b36ba652df764675a72d1f655e5782701e296e5ff"} Feb 02 17:12:48 crc kubenswrapper[4835]: I0202 17:12:48.638924 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:12:48 crc kubenswrapper[4835]: I0202 17:12:48.642939 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450","Type":"ContainerStarted","Data":"a8e3de0bebea90747bd887a25884ef58f753a20eac90599ba395e2c2e5c4a4a3"} Feb 02 17:12:48 crc kubenswrapper[4835]: I0202 17:12:48.643575 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 02 17:12:48 crc kubenswrapper[4835]: I0202 17:12:48.676024 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" podStartSLOduration=2.189241034 podStartE2EDuration="10.676006246s" podCreationTimestamp="2026-02-02 17:12:38 +0000 UTC" firstStartedPulling="2026-02-02 17:12:39.304908479 +0000 UTC m=+1350.926512579" lastFinishedPulling="2026-02-02 17:12:47.791673701 +0000 UTC m=+1359.413277791" observedRunningTime="2026-02-02 17:12:48.667548776 +0000 UTC m=+1360.289152856" watchObservedRunningTime="2026-02-02 17:12:48.676006246 +0000 UTC m=+1360.297610326" Feb 02 17:12:48 crc kubenswrapper[4835]: I0202 17:12:48.705659 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.705637916 podStartE2EDuration="37.705637916s" podCreationTimestamp="2026-02-02 17:12:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:12:48.699925904 +0000 UTC m=+1360.321529994" watchObservedRunningTime="2026-02-02 17:12:48.705637916 +0000 UTC m=+1360.327241996" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.196405 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.196383067 podStartE2EDuration="39.196383067s" podCreationTimestamp="2026-02-02 17:12:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:12:48.750659673 +0000 UTC m=+1360.372263753" watchObservedRunningTime="2026-02-02 17:12:50.196383067 +0000 UTC m=+1361.817987147" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.198249 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-88h9v"] Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.200412 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.220767 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-88h9v"] Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.359893 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-catalog-content\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.359949 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-utilities\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.360087 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g75ck\" (UniqueName: \"kubernetes.io/projected/770650ff-827d-4b45-a9bc-64e1db201be6-kube-api-access-g75ck\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.461351 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-catalog-content\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.461409 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-utilities\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.461488 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g75ck\" (UniqueName: \"kubernetes.io/projected/770650ff-827d-4b45-a9bc-64e1db201be6-kube-api-access-g75ck\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.461898 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-catalog-content\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.461919 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-utilities\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.497193 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-g75ck\" (UniqueName: \"kubernetes.io/projected/770650ff-827d-4b45-a9bc-64e1db201be6-kube-api-access-g75ck\") pod \"redhat-operators-88h9v\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:50 crc kubenswrapper[4835]: I0202 17:12:50.521982 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:12:51 crc kubenswrapper[4835]: I0202 17:12:51.013463 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-88h9v"] Feb 02 17:12:51 crc kubenswrapper[4835]: I0202 17:12:51.677378 4835 generic.go:334] "Generic (PLEG): container finished" podID="770650ff-827d-4b45-a9bc-64e1db201be6" containerID="5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9" exitCode=0 Feb 02 17:12:51 crc kubenswrapper[4835]: I0202 17:12:51.677446 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88h9v" event={"ID":"770650ff-827d-4b45-a9bc-64e1db201be6","Type":"ContainerDied","Data":"5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9"} Feb 02 17:12:51 crc kubenswrapper[4835]: I0202 17:12:51.677704 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88h9v" event={"ID":"770650ff-827d-4b45-a9bc-64e1db201be6","Type":"ContainerStarted","Data":"f120864f728ca0594c0162817f730c888c3eb5d46bc82f5bc39632533027b17c"} Feb 02 17:12:52 crc kubenswrapper[4835]: I0202 17:12:52.689669 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88h9v" event={"ID":"770650ff-827d-4b45-a9bc-64e1db201be6","Type":"ContainerStarted","Data":"fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913"} Feb 02 17:12:53 crc kubenswrapper[4835]: I0202 17:12:53.699013 4835 generic.go:334] "Generic (PLEG): container finished" podID="770650ff-827d-4b45-a9bc-64e1db201be6" containerID="fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913" exitCode=0 Feb 02 17:12:53 crc kubenswrapper[4835]: I0202 17:12:53.699050 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88h9v" event={"ID":"770650ff-827d-4b45-a9bc-64e1db201be6","Type":"ContainerDied","Data":"fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913"} Feb 02 17:12:54 crc kubenswrapper[4835]: I0202 17:12:54.718312 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88h9v" event={"ID":"770650ff-827d-4b45-a9bc-64e1db201be6","Type":"ContainerStarted","Data":"0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf"} Feb 02 17:12:59 crc kubenswrapper[4835]: I0202 17:12:59.760999 4835 generic.go:334] "Generic (PLEG): container finished" podID="12e6900a-36b2-4110-8f06-d37236112c63" containerID="bbd2052111b641a4ab901e87a92e9b75841bb197c69d06f41fd37dffb7812984" exitCode=0 Feb 02 17:12:59 crc kubenswrapper[4835]: I0202 17:12:59.761089 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" event={"ID":"12e6900a-36b2-4110-8f06-d37236112c63","Type":"ContainerDied","Data":"bbd2052111b641a4ab901e87a92e9b75841bb197c69d06f41fd37dffb7812984"} Feb 02 17:12:59 crc kubenswrapper[4835]: I0202 17:12:59.776913 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-88h9v" podStartSLOduration=7.256999881 
podStartE2EDuration="9.776889512s" podCreationTimestamp="2026-02-02 17:12:50 +0000 UTC" firstStartedPulling="2026-02-02 17:12:51.678926365 +0000 UTC m=+1363.300530445" lastFinishedPulling="2026-02-02 17:12:54.198815996 +0000 UTC m=+1365.820420076" observedRunningTime="2026-02-02 17:12:54.758872687 +0000 UTC m=+1366.380476777" watchObservedRunningTime="2026-02-02 17:12:59.776889512 +0000 UTC m=+1371.398493592" Feb 02 17:13:00 crc kubenswrapper[4835]: I0202 17:13:00.523521 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:13:00 crc kubenswrapper[4835]: I0202 17:13:00.523601 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:13:00 crc kubenswrapper[4835]: I0202 17:13:00.575773 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:13:00 crc kubenswrapper[4835]: I0202 17:13:00.822153 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:13:00 crc kubenswrapper[4835]: I0202 17:13:00.873010 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-88h9v"] Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.195556 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.302915 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-ssh-key-openstack-edpm-ipam\") pod \"12e6900a-36b2-4110-8f06-d37236112c63\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.302987 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-repo-setup-combined-ca-bundle\") pod \"12e6900a-36b2-4110-8f06-d37236112c63\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.303121 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-inventory\") pod \"12e6900a-36b2-4110-8f06-d37236112c63\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.303221 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv5d7\" (UniqueName: \"kubernetes.io/projected/12e6900a-36b2-4110-8f06-d37236112c63-kube-api-access-fv5d7\") pod \"12e6900a-36b2-4110-8f06-d37236112c63\" (UID: \"12e6900a-36b2-4110-8f06-d37236112c63\") " Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.308861 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "12e6900a-36b2-4110-8f06-d37236112c63" (UID: "12e6900a-36b2-4110-8f06-d37236112c63"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.309511 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12e6900a-36b2-4110-8f06-d37236112c63-kube-api-access-fv5d7" (OuterVolumeSpecName: "kube-api-access-fv5d7") pod "12e6900a-36b2-4110-8f06-d37236112c63" (UID: "12e6900a-36b2-4110-8f06-d37236112c63"). InnerVolumeSpecName "kube-api-access-fv5d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.330164 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "12e6900a-36b2-4110-8f06-d37236112c63" (UID: "12e6900a-36b2-4110-8f06-d37236112c63"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.339559 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-inventory" (OuterVolumeSpecName: "inventory") pod "12e6900a-36b2-4110-8f06-d37236112c63" (UID: "12e6900a-36b2-4110-8f06-d37236112c63"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.405980 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.406020 4835 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.406033 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12e6900a-36b2-4110-8f06-d37236112c63-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.406044 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv5d7\" (UniqueName: \"kubernetes.io/projected/12e6900a-36b2-4110-8f06-d37236112c63-kube-api-access-fv5d7\") on node \"crc\" DevicePath \"\"" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.781247 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" event={"ID":"12e6900a-36b2-4110-8f06-d37236112c63","Type":"ContainerDied","Data":"3561f8fb820a00571bdde393f6785ef7c7532060e64b1b21e0f6bb2c5c7fa4a6"} Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.781604 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3561f8fb820a00571bdde393f6785ef7c7532060e64b1b21e0f6bb2c5c7fa4a6" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.781262 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.856769 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.864861 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq"] Feb 02 17:13:01 crc kubenswrapper[4835]: E0202 17:13:01.865491 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12e6900a-36b2-4110-8f06-d37236112c63" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.865608 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="12e6900a-36b2-4110-8f06-d37236112c63" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.865920 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="12e6900a-36b2-4110-8f06-d37236112c63" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.867651 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.873805 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.874241 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.874558 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.875682 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:13:01 crc kubenswrapper[4835]: I0202 17:13:01.881187 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq"] Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.018079 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.018262 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzlwm\" (UniqueName: \"kubernetes.io/projected/ca98e7db-a3cd-4839-991d-d8a08b956675-kube-api-access-fzlwm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.018331 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.018957 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.120375 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.120506 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.120556 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzlwm\" (UniqueName: \"kubernetes.io/projected/ca98e7db-a3cd-4839-991d-d8a08b956675-kube-api-access-fzlwm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.120587 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.125310 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.129263 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.130806 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-inventory\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.141808 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzlwm\" (UniqueName: \"kubernetes.io/projected/ca98e7db-a3cd-4839-991d-d8a08b956675-kube-api-access-fzlwm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.202637 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.220477 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.787402 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-88h9v" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" containerName="registry-server" containerID="cri-o://0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf" gracePeriod=2 Feb 02 17:13:02 crc kubenswrapper[4835]: I0202 17:13:02.801027 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq"] Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.287602 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.447757 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g75ck\" (UniqueName: \"kubernetes.io/projected/770650ff-827d-4b45-a9bc-64e1db201be6-kube-api-access-g75ck\") pod \"770650ff-827d-4b45-a9bc-64e1db201be6\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.448255 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-catalog-content\") pod \"770650ff-827d-4b45-a9bc-64e1db201be6\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.448352 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-utilities\") pod \"770650ff-827d-4b45-a9bc-64e1db201be6\" (UID: \"770650ff-827d-4b45-a9bc-64e1db201be6\") " Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.449506 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-utilities" (OuterVolumeSpecName: "utilities") pod "770650ff-827d-4b45-a9bc-64e1db201be6" (UID: "770650ff-827d-4b45-a9bc-64e1db201be6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.457405 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/770650ff-827d-4b45-a9bc-64e1db201be6-kube-api-access-g75ck" (OuterVolumeSpecName: "kube-api-access-g75ck") pod "770650ff-827d-4b45-a9bc-64e1db201be6" (UID: "770650ff-827d-4b45-a9bc-64e1db201be6"). InnerVolumeSpecName "kube-api-access-g75ck". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.555506 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.555542 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g75ck\" (UniqueName: \"kubernetes.io/projected/770650ff-827d-4b45-a9bc-64e1db201be6-kube-api-access-g75ck\") on node \"crc\" DevicePath \"\"" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.565691 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "770650ff-827d-4b45-a9bc-64e1db201be6" (UID: "770650ff-827d-4b45-a9bc-64e1db201be6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.657197 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/770650ff-827d-4b45-a9bc-64e1db201be6-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.797823 4835 generic.go:334] "Generic (PLEG): container finished" podID="770650ff-827d-4b45-a9bc-64e1db201be6" containerID="0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf" exitCode=0 Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.797895 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-88h9v" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.797880 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88h9v" event={"ID":"770650ff-827d-4b45-a9bc-64e1db201be6","Type":"ContainerDied","Data":"0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf"} Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.798051 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88h9v" event={"ID":"770650ff-827d-4b45-a9bc-64e1db201be6","Type":"ContainerDied","Data":"f120864f728ca0594c0162817f730c888c3eb5d46bc82f5bc39632533027b17c"} Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.798079 4835 scope.go:117] "RemoveContainer" containerID="0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.799924 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" event={"ID":"ca98e7db-a3cd-4839-991d-d8a08b956675","Type":"ContainerStarted","Data":"c50efc1128a8d979799dffcd562cb64b1af0d2133b130187d442aad0e8ef5688"} Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.799956 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" event={"ID":"ca98e7db-a3cd-4839-991d-d8a08b956675","Type":"ContainerStarted","Data":"f2610bce23d5dadc7f9d02f75fa1c26fae42d4f739e7a4837fb7c2ce2fa6def6"} Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.819207 4835 scope.go:117] "RemoveContainer" containerID="fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.844612 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" podStartSLOduration=2.356366528 podStartE2EDuration="2.844595212s" podCreationTimestamp="2026-02-02 17:13:01 +0000 UTC" firstStartedPulling="2026-02-02 17:13:02.81824903 +0000 UTC m=+1374.439853110" lastFinishedPulling="2026-02-02 17:13:03.306477714 +0000 UTC m=+1374.928081794" observedRunningTime="2026-02-02 17:13:03.838973263 +0000 UTC m=+1375.460577343" watchObservedRunningTime="2026-02-02 17:13:03.844595212 +0000 UTC m=+1375.466199292" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.857254 4835 scope.go:117] "RemoveContainer" containerID="5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.861595 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-88h9v"] Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.875372 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-88h9v"] Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.905012 4835 scope.go:117] "RemoveContainer" containerID="0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf" Feb 02 17:13:03 crc kubenswrapper[4835]: E0202 17:13:03.905528 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf\": container with ID starting with 0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf not found: ID does not exist" 
containerID="0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.905567 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf"} err="failed to get container status \"0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf\": rpc error: code = NotFound desc = could not find container \"0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf\": container with ID starting with 0789b3bcdce22f6f354e04cb9d07d3ef341532a8baaf2933925c1e1aff1539bf not found: ID does not exist" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.905593 4835 scope.go:117] "RemoveContainer" containerID="fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913" Feb 02 17:13:03 crc kubenswrapper[4835]: E0202 17:13:03.905880 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913\": container with ID starting with fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913 not found: ID does not exist" containerID="fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.905899 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913"} err="failed to get container status \"fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913\": rpc error: code = NotFound desc = could not find container \"fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913\": container with ID starting with fd4eab9be31e82356b5b86cd34c0bdeb0b03baf4602f4b351fb49cb74c9f4913 not found: ID does not exist" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.905913 4835 scope.go:117] "RemoveContainer" containerID="5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9" Feb 02 17:13:03 crc kubenswrapper[4835]: E0202 17:13:03.906261 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9\": container with ID starting with 5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9 not found: ID does not exist" containerID="5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9" Feb 02 17:13:03 crc kubenswrapper[4835]: I0202 17:13:03.906305 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9"} err="failed to get container status \"5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9\": rpc error: code = NotFound desc = could not find container \"5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9\": container with ID starting with 5fe5fb257a70f8ac187b56ae0076c979140f48860871cdb42a4fcc81151f70a9 not found: ID does not exist" Feb 02 17:13:05 crc kubenswrapper[4835]: I0202 17:13:05.201114 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" path="/var/lib/kubelet/pods/770650ff-827d-4b45-a9bc-64e1db201be6/volumes" Feb 02 17:13:44 crc kubenswrapper[4835]: I0202 17:13:44.870218 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:13:44 crc kubenswrapper[4835]: I0202 17:13:44.871397 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.880654 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5b85g"] Feb 02 17:13:49 crc kubenswrapper[4835]: E0202 17:13:49.882341 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" containerName="extract-utilities" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.882360 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" containerName="extract-utilities" Feb 02 17:13:49 crc kubenswrapper[4835]: E0202 17:13:49.882386 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" containerName="extract-content" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.882395 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" containerName="extract-content" Feb 02 17:13:49 crc kubenswrapper[4835]: E0202 17:13:49.882423 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" containerName="registry-server" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.882432 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" containerName="registry-server" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.882645 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="770650ff-827d-4b45-a9bc-64e1db201be6" containerName="registry-server" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.886399 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.895969 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5b85g"] Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.971021 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4fgt\" (UniqueName: \"kubernetes.io/projected/611d8207-d4ee-4548-9eaf-2a21f0587471-kube-api-access-g4fgt\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.971562 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-utilities\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:49 crc kubenswrapper[4835]: I0202 17:13:49.971704 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-catalog-content\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:50 crc kubenswrapper[4835]: I0202 17:13:50.073795 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-utilities\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:50 crc kubenswrapper[4835]: I0202 17:13:50.073898 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-catalog-content\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:50 crc kubenswrapper[4835]: I0202 17:13:50.074040 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4fgt\" (UniqueName: \"kubernetes.io/projected/611d8207-d4ee-4548-9eaf-2a21f0587471-kube-api-access-g4fgt\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:50 crc kubenswrapper[4835]: I0202 17:13:50.074295 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-utilities\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:50 crc kubenswrapper[4835]: I0202 17:13:50.074552 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-catalog-content\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:50 crc kubenswrapper[4835]: I0202 17:13:50.096422 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g4fgt\" (UniqueName: \"kubernetes.io/projected/611d8207-d4ee-4548-9eaf-2a21f0587471-kube-api-access-g4fgt\") pod \"certified-operators-5b85g\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:50 crc kubenswrapper[4835]: I0202 17:13:50.215594 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:13:50 crc kubenswrapper[4835]: I0202 17:13:50.758883 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5b85g"] Feb 02 17:13:51 crc kubenswrapper[4835]: I0202 17:13:51.236528 4835 generic.go:334] "Generic (PLEG): container finished" podID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerID="7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff" exitCode=0 Feb 02 17:13:51 crc kubenswrapper[4835]: I0202 17:13:51.236571 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5b85g" event={"ID":"611d8207-d4ee-4548-9eaf-2a21f0587471","Type":"ContainerDied","Data":"7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff"} Feb 02 17:13:51 crc kubenswrapper[4835]: I0202 17:13:51.236923 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5b85g" event={"ID":"611d8207-d4ee-4548-9eaf-2a21f0587471","Type":"ContainerStarted","Data":"ff6123948cb90b4b40c519b93475a864989535601c33bad2b9717af133a12bb1"} Feb 02 17:13:52 crc kubenswrapper[4835]: I0202 17:13:52.254373 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5b85g" event={"ID":"611d8207-d4ee-4548-9eaf-2a21f0587471","Type":"ContainerStarted","Data":"41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4"} Feb 02 17:13:53 crc kubenswrapper[4835]: I0202 17:13:53.268343 4835 generic.go:334] "Generic (PLEG): container finished" podID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerID="41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4" exitCode=0 Feb 02 17:13:53 crc kubenswrapper[4835]: I0202 17:13:53.268449 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5b85g" event={"ID":"611d8207-d4ee-4548-9eaf-2a21f0587471","Type":"ContainerDied","Data":"41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4"} Feb 02 17:13:53 crc kubenswrapper[4835]: I0202 17:13:53.268769 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5b85g" event={"ID":"611d8207-d4ee-4548-9eaf-2a21f0587471","Type":"ContainerStarted","Data":"8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4"} Feb 02 17:13:53 crc kubenswrapper[4835]: I0202 17:13:53.296961 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5b85g" podStartSLOduration=2.856258263 podStartE2EDuration="4.296941964s" podCreationTimestamp="2026-02-02 17:13:49 +0000 UTC" firstStartedPulling="2026-02-02 17:13:51.238373543 +0000 UTC m=+1422.859977623" lastFinishedPulling="2026-02-02 17:13:52.679057244 +0000 UTC m=+1424.300661324" observedRunningTime="2026-02-02 17:13:53.289795461 +0000 UTC m=+1424.911399551" watchObservedRunningTime="2026-02-02 17:13:53.296941964 +0000 UTC m=+1424.918546044" Feb 02 17:14:00 crc kubenswrapper[4835]: I0202 17:14:00.215719 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:14:00 crc kubenswrapper[4835]: I0202 17:14:00.216149 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:14:00 crc kubenswrapper[4835]: I0202 17:14:00.268248 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:14:00 crc kubenswrapper[4835]: I0202 17:14:00.390504 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:14:00 crc kubenswrapper[4835]: I0202 17:14:00.507303 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5b85g"] Feb 02 17:14:02 crc kubenswrapper[4835]: I0202 17:14:02.363942 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5b85g" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerName="registry-server" containerID="cri-o://8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4" gracePeriod=2 Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.308730 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.374347 4835 generic.go:334] "Generic (PLEG): container finished" podID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerID="8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4" exitCode=0 Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.374387 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5b85g" event={"ID":"611d8207-d4ee-4548-9eaf-2a21f0587471","Type":"ContainerDied","Data":"8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4"} Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.374412 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5b85g" event={"ID":"611d8207-d4ee-4548-9eaf-2a21f0587471","Type":"ContainerDied","Data":"ff6123948cb90b4b40c519b93475a864989535601c33bad2b9717af133a12bb1"} Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.374417 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5b85g" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.374427 4835 scope.go:117] "RemoveContainer" containerID="8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.394580 4835 scope.go:117] "RemoveContainer" containerID="41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.416909 4835 scope.go:117] "RemoveContainer" containerID="7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.426095 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-utilities\") pod \"611d8207-d4ee-4548-9eaf-2a21f0587471\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.426310 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4fgt\" (UniqueName: \"kubernetes.io/projected/611d8207-d4ee-4548-9eaf-2a21f0587471-kube-api-access-g4fgt\") pod \"611d8207-d4ee-4548-9eaf-2a21f0587471\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.426380 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-catalog-content\") pod \"611d8207-d4ee-4548-9eaf-2a21f0587471\" (UID: \"611d8207-d4ee-4548-9eaf-2a21f0587471\") " Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.427319 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-utilities" (OuterVolumeSpecName: "utilities") pod "611d8207-d4ee-4548-9eaf-2a21f0587471" (UID: "611d8207-d4ee-4548-9eaf-2a21f0587471"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.432893 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/611d8207-d4ee-4548-9eaf-2a21f0587471-kube-api-access-g4fgt" (OuterVolumeSpecName: "kube-api-access-g4fgt") pod "611d8207-d4ee-4548-9eaf-2a21f0587471" (UID: "611d8207-d4ee-4548-9eaf-2a21f0587471"). InnerVolumeSpecName "kube-api-access-g4fgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.442284 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4fgt\" (UniqueName: \"kubernetes.io/projected/611d8207-d4ee-4548-9eaf-2a21f0587471-kube-api-access-g4fgt\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.442309 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.468745 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "611d8207-d4ee-4548-9eaf-2a21f0587471" (UID: "611d8207-d4ee-4548-9eaf-2a21f0587471"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.506048 4835 scope.go:117] "RemoveContainer" containerID="8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4" Feb 02 17:14:03 crc kubenswrapper[4835]: E0202 17:14:03.506981 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4\": container with ID starting with 8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4 not found: ID does not exist" containerID="8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.507021 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4"} err="failed to get container status \"8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4\": rpc error: code = NotFound desc = could not find container \"8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4\": container with ID starting with 8c976e638061b6395f975662751794ef70052ee3e821dc58ea32667f157a51b4 not found: ID does not exist" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.507045 4835 scope.go:117] "RemoveContainer" containerID="41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4" Feb 02 17:14:03 crc kubenswrapper[4835]: E0202 17:14:03.507492 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4\": container with ID starting with 41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4 not found: ID does not exist" containerID="41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.507541 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4"} err="failed to get container status \"41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4\": rpc error: code = NotFound desc = could not find container \"41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4\": container with ID starting with 41db75336ac02ee1cd2ac9b6b9c10f564b63d6c0bce4c7519812d858b39de8b4 not found: ID does not exist" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.507572 4835 scope.go:117] "RemoveContainer" containerID="7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff" Feb 02 17:14:03 crc kubenswrapper[4835]: E0202 17:14:03.507904 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff\": container with ID starting with 7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff not found: ID does not exist" containerID="7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.507941 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff"} err="failed to get container status \"7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff\": rpc error: code = NotFound desc = could not 
find container \"7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff\": container with ID starting with 7013ad65b4ee818e3e1b299c30e3bc94c1b11e0138024de2b063cbbeaaaac9ff not found: ID does not exist" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.544147 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/611d8207-d4ee-4548-9eaf-2a21f0587471-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.720044 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5b85g"] Feb 02 17:14:03 crc kubenswrapper[4835]: I0202 17:14:03.729890 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5b85g"] Feb 02 17:14:05 crc kubenswrapper[4835]: I0202 17:14:05.201542 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" path="/var/lib/kubelet/pods/611d8207-d4ee-4548-9eaf-2a21f0587471/volumes" Feb 02 17:14:10 crc kubenswrapper[4835]: I0202 17:14:10.675277 4835 scope.go:117] "RemoveContainer" containerID="edf347c315fbdc6e55783e0ae6fd91c66747996d27717a753f2ddc82c1af51a6" Feb 02 17:14:10 crc kubenswrapper[4835]: I0202 17:14:10.711890 4835 scope.go:117] "RemoveContainer" containerID="4efd4654e3ef3dd85035cf5585c8400033aabedc0f219ff97315630e8ab032ea" Feb 02 17:14:10 crc kubenswrapper[4835]: I0202 17:14:10.760171 4835 scope.go:117] "RemoveContainer" containerID="ce0cdf63120ce260b9d13b5cf20294ae008be1b808035648165fe5131fcd188c" Feb 02 17:14:14 crc kubenswrapper[4835]: I0202 17:14:14.870533 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:14:14 crc kubenswrapper[4835]: I0202 17:14:14.871077 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.543194 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9g294"] Feb 02 17:14:25 crc kubenswrapper[4835]: E0202 17:14:25.546526 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerName="extract-content" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.546562 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerName="extract-content" Feb 02 17:14:25 crc kubenswrapper[4835]: E0202 17:14:25.546595 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerName="registry-server" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.546611 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerName="registry-server" Feb 02 17:14:25 crc kubenswrapper[4835]: E0202 17:14:25.546650 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerName="extract-utilities" Feb 02 17:14:25 crc 
kubenswrapper[4835]: I0202 17:14:25.546669 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerName="extract-utilities" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.552655 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="611d8207-d4ee-4548-9eaf-2a21f0587471" containerName="registry-server" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.553827 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9g294"] Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.553912 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.665501 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-catalog-content\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.665573 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-utilities\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.665665 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbtw4\" (UniqueName: \"kubernetes.io/projected/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-kube-api-access-hbtw4\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.767170 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-catalog-content\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.767245 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-utilities\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.767349 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbtw4\" (UniqueName: \"kubernetes.io/projected/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-kube-api-access-hbtw4\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.767655 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-catalog-content\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " 
pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.767753 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-utilities\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.788861 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbtw4\" (UniqueName: \"kubernetes.io/projected/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-kube-api-access-hbtw4\") pod \"community-operators-9g294\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:25 crc kubenswrapper[4835]: I0202 17:14:25.883134 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:26 crc kubenswrapper[4835]: I0202 17:14:26.380651 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9g294"] Feb 02 17:14:26 crc kubenswrapper[4835]: I0202 17:14:26.578086 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g294" event={"ID":"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3","Type":"ContainerStarted","Data":"02673616b74f6388009532ac4afd66b73c83d4699ccfe606430d7ee6318f5abc"} Feb 02 17:14:27 crc kubenswrapper[4835]: I0202 17:14:27.587069 4835 generic.go:334] "Generic (PLEG): container finished" podID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerID="9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc" exitCode=0 Feb 02 17:14:27 crc kubenswrapper[4835]: I0202 17:14:27.587197 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g294" event={"ID":"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3","Type":"ContainerDied","Data":"9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc"} Feb 02 17:14:28 crc kubenswrapper[4835]: I0202 17:14:28.600021 4835 generic.go:334] "Generic (PLEG): container finished" podID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerID="39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26" exitCode=0 Feb 02 17:14:28 crc kubenswrapper[4835]: I0202 17:14:28.600078 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g294" event={"ID":"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3","Type":"ContainerDied","Data":"39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26"} Feb 02 17:14:29 crc kubenswrapper[4835]: I0202 17:14:29.612020 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g294" event={"ID":"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3","Type":"ContainerStarted","Data":"6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c"} Feb 02 17:14:29 crc kubenswrapper[4835]: I0202 17:14:29.638859 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9g294" podStartSLOduration=3.22802164 podStartE2EDuration="4.638842475s" podCreationTimestamp="2026-02-02 17:14:25 +0000 UTC" firstStartedPulling="2026-02-02 17:14:27.588998743 +0000 UTC m=+1459.210602833" lastFinishedPulling="2026-02-02 17:14:28.999819568 +0000 UTC m=+1460.621423668" observedRunningTime="2026-02-02 17:14:29.627811321 +0000 UTC m=+1461.249415421" 
watchObservedRunningTime="2026-02-02 17:14:29.638842475 +0000 UTC m=+1461.260446555" Feb 02 17:14:35 crc kubenswrapper[4835]: I0202 17:14:35.883404 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:35 crc kubenswrapper[4835]: I0202 17:14:35.883904 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:35 crc kubenswrapper[4835]: I0202 17:14:35.947115 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:36 crc kubenswrapper[4835]: I0202 17:14:36.721171 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:36 crc kubenswrapper[4835]: I0202 17:14:36.765966 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9g294"] Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.591912 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2k2lz"] Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.594309 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.605117 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2k2lz"] Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.689954 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9g294" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerName="registry-server" containerID="cri-o://6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c" gracePeriod=2 Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.705655 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc7g9\" (UniqueName: \"kubernetes.io/projected/f5e01feb-2d87-44c8-9784-624edd181925-kube-api-access-lc7g9\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.705808 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-utilities\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.705859 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-catalog-content\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.807562 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-utilities\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 
17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.808093 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-catalog-content\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.808234 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc7g9\" (UniqueName: \"kubernetes.io/projected/f5e01feb-2d87-44c8-9784-624edd181925-kube-api-access-lc7g9\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.808372 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-utilities\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.808594 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-catalog-content\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.827775 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc7g9\" (UniqueName: \"kubernetes.io/projected/f5e01feb-2d87-44c8-9784-624edd181925-kube-api-access-lc7g9\") pod \"redhat-marketplace-2k2lz\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:38 crc kubenswrapper[4835]: I0202 17:14:38.933631 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.537264 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2k2lz"] Feb 02 17:14:39 crc kubenswrapper[4835]: W0202 17:14:39.540633 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5e01feb_2d87_44c8_9784_624edd181925.slice/crio-8e9beca39affc928f51b35bf5998ee72755cb74fa21bdbf70ff767b5713ce155 WatchSource:0}: Error finding container 8e9beca39affc928f51b35bf5998ee72755cb74fa21bdbf70ff767b5713ce155: Status 404 returned error can't find the container with id 8e9beca39affc928f51b35bf5998ee72755cb74fa21bdbf70ff767b5713ce155 Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.595623 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.701181 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2k2lz" event={"ID":"f5e01feb-2d87-44c8-9784-624edd181925","Type":"ContainerStarted","Data":"ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0"} Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.701228 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2k2lz" event={"ID":"f5e01feb-2d87-44c8-9784-624edd181925","Type":"ContainerStarted","Data":"8e9beca39affc928f51b35bf5998ee72755cb74fa21bdbf70ff767b5713ce155"} Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.708153 4835 generic.go:334] "Generic (PLEG): container finished" podID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerID="6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c" exitCode=0 Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.708198 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9g294" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.708216 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g294" event={"ID":"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3","Type":"ContainerDied","Data":"6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c"} Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.708248 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9g294" event={"ID":"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3","Type":"ContainerDied","Data":"02673616b74f6388009532ac4afd66b73c83d4699ccfe606430d7ee6318f5abc"} Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.708314 4835 scope.go:117] "RemoveContainer" containerID="6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.732207 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-catalog-content\") pod \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.732315 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbtw4\" (UniqueName: \"kubernetes.io/projected/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-kube-api-access-hbtw4\") pod \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.732368 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-utilities\") pod \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\" (UID: \"2e0eb480-27f0-4ce3-86e6-6d92c51fafb3\") " Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.733223 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-utilities" (OuterVolumeSpecName: "utilities") pod "2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" (UID: "2e0eb480-27f0-4ce3-86e6-6d92c51fafb3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.738534 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-kube-api-access-hbtw4" (OuterVolumeSpecName: "kube-api-access-hbtw4") pod "2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" (UID: "2e0eb480-27f0-4ce3-86e6-6d92c51fafb3"). InnerVolumeSpecName "kube-api-access-hbtw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.779679 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" (UID: "2e0eb480-27f0-4ce3-86e6-6d92c51fafb3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.786301 4835 scope.go:117] "RemoveContainer" containerID="39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.808546 4835 scope.go:117] "RemoveContainer" containerID="9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.834920 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.834948 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbtw4\" (UniqueName: \"kubernetes.io/projected/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-kube-api-access-hbtw4\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.834958 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.852360 4835 scope.go:117] "RemoveContainer" containerID="6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c" Feb 02 17:14:39 crc kubenswrapper[4835]: E0202 17:14:39.853156 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c\": container with ID starting with 6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c not found: ID does not exist" containerID="6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.853200 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c"} err="failed to get container status \"6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c\": rpc error: code = NotFound desc = could not find container \"6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c\": container with ID starting with 6777112d7dae526b2d63dd0aa2f4719cd1e3da9fca04f14af08e33a5c0ea9f4c not found: ID does not exist" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.853230 4835 scope.go:117] "RemoveContainer" containerID="39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26" Feb 02 
17:14:39 crc kubenswrapper[4835]: E0202 17:14:39.853803 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26\": container with ID starting with 39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26 not found: ID does not exist" containerID="39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.853837 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26"} err="failed to get container status \"39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26\": rpc error: code = NotFound desc = could not find container \"39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26\": container with ID starting with 39254fc2eb4cbc9c9dd82e8c41e5de1d27515ffbd1f8e61a059a585fdd92ed26 not found: ID does not exist" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.853860 4835 scope.go:117] "RemoveContainer" containerID="9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc" Feb 02 17:14:39 crc kubenswrapper[4835]: E0202 17:14:39.854170 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc\": container with ID starting with 9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc not found: ID does not exist" containerID="9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc" Feb 02 17:14:39 crc kubenswrapper[4835]: I0202 17:14:39.854227 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc"} err="failed to get container status \"9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc\": rpc error: code = NotFound desc = could not find container \"9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc\": container with ID starting with 9905c9a06126890b0bd2a20722c7f6d54c7668518f7dbdb2167487f40af099bc not found: ID does not exist" Feb 02 17:14:40 crc kubenswrapper[4835]: I0202 17:14:40.043873 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9g294"] Feb 02 17:14:40 crc kubenswrapper[4835]: I0202 17:14:40.050699 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9g294"] Feb 02 17:14:40 crc kubenswrapper[4835]: I0202 17:14:40.718291 4835 generic.go:334] "Generic (PLEG): container finished" podID="f5e01feb-2d87-44c8-9784-624edd181925" containerID="ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0" exitCode=0 Feb 02 17:14:40 crc kubenswrapper[4835]: I0202 17:14:40.718356 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2k2lz" event={"ID":"f5e01feb-2d87-44c8-9784-624edd181925","Type":"ContainerDied","Data":"ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0"} Feb 02 17:14:41 crc kubenswrapper[4835]: I0202 17:14:41.201969 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" path="/var/lib/kubelet/pods/2e0eb480-27f0-4ce3-86e6-6d92c51fafb3/volumes" Feb 02 17:14:41 crc kubenswrapper[4835]: I0202 17:14:41.733838 4835 generic.go:334] "Generic 
(PLEG): container finished" podID="f5e01feb-2d87-44c8-9784-624edd181925" containerID="85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da" exitCode=0 Feb 02 17:14:41 crc kubenswrapper[4835]: I0202 17:14:41.733891 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2k2lz" event={"ID":"f5e01feb-2d87-44c8-9784-624edd181925","Type":"ContainerDied","Data":"85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da"} Feb 02 17:14:42 crc kubenswrapper[4835]: I0202 17:14:42.743761 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2k2lz" event={"ID":"f5e01feb-2d87-44c8-9784-624edd181925","Type":"ContainerStarted","Data":"4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b"} Feb 02 17:14:42 crc kubenswrapper[4835]: I0202 17:14:42.769186 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2k2lz" podStartSLOduration=3.343758275 podStartE2EDuration="4.769165836s" podCreationTimestamp="2026-02-02 17:14:38 +0000 UTC" firstStartedPulling="2026-02-02 17:14:40.720516657 +0000 UTC m=+1472.342120737" lastFinishedPulling="2026-02-02 17:14:42.145924218 +0000 UTC m=+1473.767528298" observedRunningTime="2026-02-02 17:14:42.758683229 +0000 UTC m=+1474.380287319" watchObservedRunningTime="2026-02-02 17:14:42.769165836 +0000 UTC m=+1474.390769916" Feb 02 17:14:44 crc kubenswrapper[4835]: I0202 17:14:44.870635 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:14:44 crc kubenswrapper[4835]: I0202 17:14:44.870952 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:14:44 crc kubenswrapper[4835]: I0202 17:14:44.870996 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:14:44 crc kubenswrapper[4835]: I0202 17:14:44.871551 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50373b29385bc4901c51a5e8702a6916b31f719329b1f21a631ab633cc9521bd"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:14:44 crc kubenswrapper[4835]: I0202 17:14:44.871615 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://50373b29385bc4901c51a5e8702a6916b31f719329b1f21a631ab633cc9521bd" gracePeriod=600 Feb 02 17:14:45 crc kubenswrapper[4835]: I0202 17:14:45.784629 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="50373b29385bc4901c51a5e8702a6916b31f719329b1f21a631ab633cc9521bd" exitCode=0 Feb 02 17:14:45 crc kubenswrapper[4835]: I0202 17:14:45.785010 4835 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"50373b29385bc4901c51a5e8702a6916b31f719329b1f21a631ab633cc9521bd"} Feb 02 17:14:45 crc kubenswrapper[4835]: I0202 17:14:45.785048 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94"} Feb 02 17:14:45 crc kubenswrapper[4835]: I0202 17:14:45.785071 4835 scope.go:117] "RemoveContainer" containerID="d1d745ca83c0b5216f384fd386fa76fd9b97cc7c8d5d53ff568a50a85b837b86" Feb 02 17:14:48 crc kubenswrapper[4835]: I0202 17:14:48.934932 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:48 crc kubenswrapper[4835]: I0202 17:14:48.935487 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:49 crc kubenswrapper[4835]: I0202 17:14:49.004319 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:49 crc kubenswrapper[4835]: I0202 17:14:49.878536 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:49 crc kubenswrapper[4835]: I0202 17:14:49.924444 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2k2lz"] Feb 02 17:14:51 crc kubenswrapper[4835]: I0202 17:14:51.844563 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2k2lz" podUID="f5e01feb-2d87-44c8-9784-624edd181925" containerName="registry-server" containerID="cri-o://4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b" gracePeriod=2 Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.276583 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.360760 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc7g9\" (UniqueName: \"kubernetes.io/projected/f5e01feb-2d87-44c8-9784-624edd181925-kube-api-access-lc7g9\") pod \"f5e01feb-2d87-44c8-9784-624edd181925\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.360968 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-catalog-content\") pod \"f5e01feb-2d87-44c8-9784-624edd181925\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.361046 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-utilities\") pod \"f5e01feb-2d87-44c8-9784-624edd181925\" (UID: \"f5e01feb-2d87-44c8-9784-624edd181925\") " Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.361966 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-utilities" (OuterVolumeSpecName: "utilities") pod "f5e01feb-2d87-44c8-9784-624edd181925" (UID: "f5e01feb-2d87-44c8-9784-624edd181925"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.366299 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e01feb-2d87-44c8-9784-624edd181925-kube-api-access-lc7g9" (OuterVolumeSpecName: "kube-api-access-lc7g9") pod "f5e01feb-2d87-44c8-9784-624edd181925" (UID: "f5e01feb-2d87-44c8-9784-624edd181925"). InnerVolumeSpecName "kube-api-access-lc7g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.386086 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5e01feb-2d87-44c8-9784-624edd181925" (UID: "f5e01feb-2d87-44c8-9784-624edd181925"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.463688 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.463752 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc7g9\" (UniqueName: \"kubernetes.io/projected/f5e01feb-2d87-44c8-9784-624edd181925-kube-api-access-lc7g9\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.463767 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e01feb-2d87-44c8-9784-624edd181925-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.857241 4835 generic.go:334] "Generic (PLEG): container finished" podID="f5e01feb-2d87-44c8-9784-624edd181925" containerID="4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b" exitCode=0 Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.857315 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2k2lz" event={"ID":"f5e01feb-2d87-44c8-9784-624edd181925","Type":"ContainerDied","Data":"4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b"} Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.857371 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2k2lz" event={"ID":"f5e01feb-2d87-44c8-9784-624edd181925","Type":"ContainerDied","Data":"8e9beca39affc928f51b35bf5998ee72755cb74fa21bdbf70ff767b5713ce155"} Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.857391 4835 scope.go:117] "RemoveContainer" containerID="4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.859437 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2k2lz" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.895254 4835 scope.go:117] "RemoveContainer" containerID="85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.897078 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2k2lz"] Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.907122 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2k2lz"] Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.916898 4835 scope.go:117] "RemoveContainer" containerID="ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.959323 4835 scope.go:117] "RemoveContainer" containerID="4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b" Feb 02 17:14:52 crc kubenswrapper[4835]: E0202 17:14:52.959716 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b\": container with ID starting with 4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b not found: ID does not exist" containerID="4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.959744 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b"} err="failed to get container status \"4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b\": rpc error: code = NotFound desc = could not find container \"4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b\": container with ID starting with 4d27f8297076ff0a811f89561163bda9b22f5f41d1d2479f279d20bdb927515b not found: ID does not exist" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.959768 4835 scope.go:117] "RemoveContainer" containerID="85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da" Feb 02 17:14:52 crc kubenswrapper[4835]: E0202 17:14:52.960069 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da\": container with ID starting with 85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da not found: ID does not exist" containerID="85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.960096 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da"} err="failed to get container status \"85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da\": rpc error: code = NotFound desc = could not find container \"85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da\": container with ID starting with 85256fcb5427d01b94b544d0cb1a0db78b543d2f3ceacd8554792772907cf2da not found: ID does not exist" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.960111 4835 scope.go:117] "RemoveContainer" containerID="ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0" Feb 02 17:14:52 crc kubenswrapper[4835]: E0202 17:14:52.960394 4835 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0\": container with ID starting with ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0 not found: ID does not exist" containerID="ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0" Feb 02 17:14:52 crc kubenswrapper[4835]: I0202 17:14:52.960445 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0"} err="failed to get container status \"ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0\": rpc error: code = NotFound desc = could not find container \"ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0\": container with ID starting with ec058ed525b2599843651ec6be0d551ff40a0a6a564a189fdb456ec11a5f1dc0 not found: ID does not exist" Feb 02 17:14:53 crc kubenswrapper[4835]: I0202 17:14:53.197852 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5e01feb-2d87-44c8-9784-624edd181925" path="/var/lib/kubelet/pods/f5e01feb-2d87-44c8-9784-624edd181925/volumes" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.146932 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87"] Feb 02 17:15:00 crc kubenswrapper[4835]: E0202 17:15:00.147871 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerName="extract-content" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.147886 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerName="extract-content" Feb 02 17:15:00 crc kubenswrapper[4835]: E0202 17:15:00.147900 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerName="registry-server" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.147908 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerName="registry-server" Feb 02 17:15:00 crc kubenswrapper[4835]: E0202 17:15:00.147923 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerName="extract-utilities" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.147932 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerName="extract-utilities" Feb 02 17:15:00 crc kubenswrapper[4835]: E0202 17:15:00.147960 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e01feb-2d87-44c8-9784-624edd181925" containerName="extract-utilities" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.147968 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e01feb-2d87-44c8-9784-624edd181925" containerName="extract-utilities" Feb 02 17:15:00 crc kubenswrapper[4835]: E0202 17:15:00.147983 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e01feb-2d87-44c8-9784-624edd181925" containerName="extract-content" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.147990 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e01feb-2d87-44c8-9784-624edd181925" containerName="extract-content" Feb 02 17:15:00 crc kubenswrapper[4835]: E0202 17:15:00.148004 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e01feb-2d87-44c8-9784-624edd181925" 
containerName="registry-server" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.148011 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e01feb-2d87-44c8-9784-624edd181925" containerName="registry-server" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.148218 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5e01feb-2d87-44c8-9784-624edd181925" containerName="registry-server" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.148235 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e0eb480-27f0-4ce3-86e6-6d92c51fafb3" containerName="registry-server" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.149038 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.153118 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.157381 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87"] Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.158121 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.210372 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-config-volume\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.210724 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmlk4\" (UniqueName: \"kubernetes.io/projected/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-kube-api-access-wmlk4\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.210760 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-secret-volume\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.312470 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-config-volume\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.312604 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmlk4\" (UniqueName: \"kubernetes.io/projected/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-kube-api-access-wmlk4\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.312641 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-secret-volume\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.313612 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-config-volume\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.320402 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-secret-volume\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.328972 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmlk4\" (UniqueName: \"kubernetes.io/projected/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-kube-api-access-wmlk4\") pod \"collect-profiles-29500875-q5x87\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.476948 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.913022 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87"] Feb 02 17:15:00 crc kubenswrapper[4835]: I0202 17:15:00.944217 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" event={"ID":"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f","Type":"ContainerStarted","Data":"6808e1b01eb7bcd15021d45ed406704809d46efefb660be48b5b036a886c99c3"} Feb 02 17:15:01 crc kubenswrapper[4835]: I0202 17:15:01.953844 4835 generic.go:334] "Generic (PLEG): container finished" podID="aa1b6455-7b51-4d7d-8cb8-35115bba7b0f" containerID="53dc8d7746aad81ef72b772ba618ea4c01a3e1675f43a25160f5eb2774888a56" exitCode=0 Feb 02 17:15:01 crc kubenswrapper[4835]: I0202 17:15:01.953892 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" event={"ID":"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f","Type":"ContainerDied","Data":"53dc8d7746aad81ef72b772ba618ea4c01a3e1675f43a25160f5eb2774888a56"} Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.333360 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.364902 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmlk4\" (UniqueName: \"kubernetes.io/projected/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-kube-api-access-wmlk4\") pod \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.365202 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-config-volume\") pod \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.365933 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-config-volume" (OuterVolumeSpecName: "config-volume") pod "aa1b6455-7b51-4d7d-8cb8-35115bba7b0f" (UID: "aa1b6455-7b51-4d7d-8cb8-35115bba7b0f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.365999 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-secret-volume\") pod \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\" (UID: \"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f\") " Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.366728 4835 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.372103 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-kube-api-access-wmlk4" (OuterVolumeSpecName: "kube-api-access-wmlk4") pod "aa1b6455-7b51-4d7d-8cb8-35115bba7b0f" (UID: "aa1b6455-7b51-4d7d-8cb8-35115bba7b0f"). InnerVolumeSpecName "kube-api-access-wmlk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.372617 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "aa1b6455-7b51-4d7d-8cb8-35115bba7b0f" (UID: "aa1b6455-7b51-4d7d-8cb8-35115bba7b0f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.482562 4835 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.482640 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmlk4\" (UniqueName: \"kubernetes.io/projected/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f-kube-api-access-wmlk4\") on node \"crc\" DevicePath \"\"" Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.977013 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" event={"ID":"aa1b6455-7b51-4d7d-8cb8-35115bba7b0f","Type":"ContainerDied","Data":"6808e1b01eb7bcd15021d45ed406704809d46efefb660be48b5b036a886c99c3"} Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.977737 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6808e1b01eb7bcd15021d45ed406704809d46efefb660be48b5b036a886c99c3" Feb 02 17:15:03 crc kubenswrapper[4835]: I0202 17:15:03.977095 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87" Feb 02 17:15:10 crc kubenswrapper[4835]: I0202 17:15:10.891145 4835 scope.go:117] "RemoveContainer" containerID="6bb1a423dfda41f2f60072f6535aedf014fa8e5b5c71ac1034f3795e9529485b" Feb 02 17:15:10 crc kubenswrapper[4835]: I0202 17:15:10.917812 4835 scope.go:117] "RemoveContainer" containerID="d6242456075e18d217ec38a5e9eb69e8241a701fc7240e4df1dbd643e84fbace" Feb 02 17:15:10 crc kubenswrapper[4835]: I0202 17:15:10.959104 4835 scope.go:117] "RemoveContainer" containerID="d1e4ec6585a148572f18d9d45b46bd74d07faf1a6518056dacc55744cd1ac570" Feb 02 17:15:53 crc kubenswrapper[4835]: I0202 17:15:53.412946 4835 generic.go:334] "Generic (PLEG): container finished" podID="ca98e7db-a3cd-4839-991d-d8a08b956675" containerID="c50efc1128a8d979799dffcd562cb64b1af0d2133b130187d442aad0e8ef5688" exitCode=0 Feb 02 17:15:53 crc kubenswrapper[4835]: I0202 17:15:53.413047 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" event={"ID":"ca98e7db-a3cd-4839-991d-d8a08b956675","Type":"ContainerDied","Data":"c50efc1128a8d979799dffcd562cb64b1af0d2133b130187d442aad0e8ef5688"} Feb 02 17:15:54 crc kubenswrapper[4835]: I0202 17:15:54.861257 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.041270 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzlwm\" (UniqueName: \"kubernetes.io/projected/ca98e7db-a3cd-4839-991d-d8a08b956675-kube-api-access-fzlwm\") pod \"ca98e7db-a3cd-4839-991d-d8a08b956675\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.041553 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-inventory\") pod \"ca98e7db-a3cd-4839-991d-d8a08b956675\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.041668 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-bootstrap-combined-ca-bundle\") pod \"ca98e7db-a3cd-4839-991d-d8a08b956675\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.041798 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-ssh-key-openstack-edpm-ipam\") pod \"ca98e7db-a3cd-4839-991d-d8a08b956675\" (UID: \"ca98e7db-a3cd-4839-991d-d8a08b956675\") " Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.053084 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ca98e7db-a3cd-4839-991d-d8a08b956675" (UID: "ca98e7db-a3cd-4839-991d-d8a08b956675"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.053159 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca98e7db-a3cd-4839-991d-d8a08b956675-kube-api-access-fzlwm" (OuterVolumeSpecName: "kube-api-access-fzlwm") pod "ca98e7db-a3cd-4839-991d-d8a08b956675" (UID: "ca98e7db-a3cd-4839-991d-d8a08b956675"). InnerVolumeSpecName "kube-api-access-fzlwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.084010 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ca98e7db-a3cd-4839-991d-d8a08b956675" (UID: "ca98e7db-a3cd-4839-991d-d8a08b956675"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.086900 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-inventory" (OuterVolumeSpecName: "inventory") pod "ca98e7db-a3cd-4839-991d-d8a08b956675" (UID: "ca98e7db-a3cd-4839-991d-d8a08b956675"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.144736 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.144786 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzlwm\" (UniqueName: \"kubernetes.io/projected/ca98e7db-a3cd-4839-991d-d8a08b956675-kube-api-access-fzlwm\") on node \"crc\" DevicePath \"\"" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.144869 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.144915 4835 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca98e7db-a3cd-4839-991d-d8a08b956675-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.433710 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" event={"ID":"ca98e7db-a3cd-4839-991d-d8a08b956675","Type":"ContainerDied","Data":"f2610bce23d5dadc7f9d02f75fa1c26fae42d4f739e7a4837fb7c2ce2fa6def6"} Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.433756 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2610bce23d5dadc7f9d02f75fa1c26fae42d4f739e7a4837fb7c2ce2fa6def6" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.433763 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.528720 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz"] Feb 02 17:15:55 crc kubenswrapper[4835]: E0202 17:15:55.529521 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca98e7db-a3cd-4839-991d-d8a08b956675" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.529553 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca98e7db-a3cd-4839-991d-d8a08b956675" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 17:15:55 crc kubenswrapper[4835]: E0202 17:15:55.529586 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa1b6455-7b51-4d7d-8cb8-35115bba7b0f" containerName="collect-profiles" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.529596 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa1b6455-7b51-4d7d-8cb8-35115bba7b0f" containerName="collect-profiles" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.529890 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca98e7db-a3cd-4839-991d-d8a08b956675" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.530082 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa1b6455-7b51-4d7d-8cb8-35115bba7b0f" containerName="collect-profiles" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.530817 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.533747 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.533929 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.534323 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.535478 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.545795 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz"] Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.666034 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.666142 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.666168 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr66c\" (UniqueName: \"kubernetes.io/projected/6e3760b5-3825-432e-8c36-31ded716eb2f-kube-api-access-pr66c\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.767586 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.767712 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.767742 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr66c\" (UniqueName: 
\"kubernetes.io/projected/6e3760b5-3825-432e-8c36-31ded716eb2f-kube-api-access-pr66c\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.775889 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.776096 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.787425 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr66c\" (UniqueName: \"kubernetes.io/projected/6e3760b5-3825-432e-8c36-31ded716eb2f-kube-api-access-pr66c\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:55 crc kubenswrapper[4835]: I0202 17:15:55.866406 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:15:56 crc kubenswrapper[4835]: I0202 17:15:56.380630 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz"] Feb 02 17:15:56 crc kubenswrapper[4835]: I0202 17:15:56.442326 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" event={"ID":"6e3760b5-3825-432e-8c36-31ded716eb2f","Type":"ContainerStarted","Data":"f5ed1541f4d04e21a534a2d0d16b4e64b3318159973a89e90a15901b9bf0971f"} Feb 02 17:15:57 crc kubenswrapper[4835]: I0202 17:15:57.454642 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" event={"ID":"6e3760b5-3825-432e-8c36-31ded716eb2f","Type":"ContainerStarted","Data":"2d160de772ee3ce919d1b438e8e7323db56e8d31d7e5b6106242c5f0bf86587f"} Feb 02 17:15:58 crc kubenswrapper[4835]: I0202 17:15:58.501734 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" podStartSLOduration=2.718202228 podStartE2EDuration="3.50169809s" podCreationTimestamp="2026-02-02 17:15:55 +0000 UTC" firstStartedPulling="2026-02-02 17:15:56.385250486 +0000 UTC m=+1548.006854566" lastFinishedPulling="2026-02-02 17:15:57.168746348 +0000 UTC m=+1548.790350428" observedRunningTime="2026-02-02 17:15:58.478703277 +0000 UTC m=+1550.100307387" watchObservedRunningTime="2026-02-02 17:15:58.50169809 +0000 UTC m=+1550.123302210" Feb 02 17:16:11 crc kubenswrapper[4835]: I0202 17:16:11.088971 4835 scope.go:117] "RemoveContainer" 
containerID="40722646456bae29e7e4f33bfdda31d100f0995b32dd23015176fb24a5c5b2ab" Feb 02 17:16:11 crc kubenswrapper[4835]: I0202 17:16:11.120896 4835 scope.go:117] "RemoveContainer" containerID="99192e86364a156adb02e21e88b11858517a5cef85e8314cff4c849a4a1ec495" Feb 02 17:17:02 crc kubenswrapper[4835]: I0202 17:17:02.021500 4835 generic.go:334] "Generic (PLEG): container finished" podID="6e3760b5-3825-432e-8c36-31ded716eb2f" containerID="2d160de772ee3ce919d1b438e8e7323db56e8d31d7e5b6106242c5f0bf86587f" exitCode=0 Feb 02 17:17:02 crc kubenswrapper[4835]: I0202 17:17:02.022119 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" event={"ID":"6e3760b5-3825-432e-8c36-31ded716eb2f","Type":"ContainerDied","Data":"2d160de772ee3ce919d1b438e8e7323db56e8d31d7e5b6106242c5f0bf86587f"} Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.453100 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.645635 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-inventory\") pod \"6e3760b5-3825-432e-8c36-31ded716eb2f\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.645721 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr66c\" (UniqueName: \"kubernetes.io/projected/6e3760b5-3825-432e-8c36-31ded716eb2f-kube-api-access-pr66c\") pod \"6e3760b5-3825-432e-8c36-31ded716eb2f\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.645823 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-ssh-key-openstack-edpm-ipam\") pod \"6e3760b5-3825-432e-8c36-31ded716eb2f\" (UID: \"6e3760b5-3825-432e-8c36-31ded716eb2f\") " Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.652127 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e3760b5-3825-432e-8c36-31ded716eb2f-kube-api-access-pr66c" (OuterVolumeSpecName: "kube-api-access-pr66c") pod "6e3760b5-3825-432e-8c36-31ded716eb2f" (UID: "6e3760b5-3825-432e-8c36-31ded716eb2f"). InnerVolumeSpecName "kube-api-access-pr66c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.673112 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-inventory" (OuterVolumeSpecName: "inventory") pod "6e3760b5-3825-432e-8c36-31ded716eb2f" (UID: "6e3760b5-3825-432e-8c36-31ded716eb2f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.675924 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "6e3760b5-3825-432e-8c36-31ded716eb2f" (UID: "6e3760b5-3825-432e-8c36-31ded716eb2f"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.748328 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.748370 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr66c\" (UniqueName: \"kubernetes.io/projected/6e3760b5-3825-432e-8c36-31ded716eb2f-kube-api-access-pr66c\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:03 crc kubenswrapper[4835]: I0202 17:17:03.748387 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e3760b5-3825-432e-8c36-31ded716eb2f-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.042471 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" event={"ID":"6e3760b5-3825-432e-8c36-31ded716eb2f","Type":"ContainerDied","Data":"f5ed1541f4d04e21a534a2d0d16b4e64b3318159973a89e90a15901b9bf0971f"} Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.042518 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5ed1541f4d04e21a534a2d0d16b4e64b3318159973a89e90a15901b9bf0971f" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.042544 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.207592 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql"] Feb 02 17:17:04 crc kubenswrapper[4835]: E0202 17:17:04.208059 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e3760b5-3825-432e-8c36-31ded716eb2f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.208081 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e3760b5-3825-432e-8c36-31ded716eb2f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.208404 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e3760b5-3825-432e-8c36-31ded716eb2f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.209364 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.213908 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.214108 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.214207 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.214327 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.218738 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql"] Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.359872 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pghzh\" (UniqueName: \"kubernetes.io/projected/dedb5045-87f7-4433-9d73-06e16998ae40-kube-api-access-pghzh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.359930 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.360005 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.461607 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.461756 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pghzh\" (UniqueName: \"kubernetes.io/projected/dedb5045-87f7-4433-9d73-06e16998ae40-kube-api-access-pghzh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.461782 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.465704 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.466886 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.480059 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pghzh\" (UniqueName: \"kubernetes.io/projected/dedb5045-87f7-4433-9d73-06e16998ae40-kube-api-access-pghzh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t2kql\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:04 crc kubenswrapper[4835]: I0202 17:17:04.536627 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:05 crc kubenswrapper[4835]: I0202 17:17:05.057226 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql"] Feb 02 17:17:06 crc kubenswrapper[4835]: I0202 17:17:06.067977 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" event={"ID":"dedb5045-87f7-4433-9d73-06e16998ae40","Type":"ContainerStarted","Data":"4be4026455e714dc3e0fb11423e89471f8352b6a646a13dc82724d38ff003465"} Feb 02 17:17:06 crc kubenswrapper[4835]: I0202 17:17:06.068505 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" event={"ID":"dedb5045-87f7-4433-9d73-06e16998ae40","Type":"ContainerStarted","Data":"18da701873ed8ae942a3f265b1a14fcfaab379aeac1dc8300f9fe294bd263da9"} Feb 02 17:17:06 crc kubenswrapper[4835]: I0202 17:17:06.097889 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" podStartSLOduration=1.64521754 podStartE2EDuration="2.097870621s" podCreationTimestamp="2026-02-02 17:17:04 +0000 UTC" firstStartedPulling="2026-02-02 17:17:05.06835017 +0000 UTC m=+1616.689954250" lastFinishedPulling="2026-02-02 17:17:05.521003251 +0000 UTC m=+1617.142607331" observedRunningTime="2026-02-02 17:17:06.08975803 +0000 UTC m=+1617.711362150" watchObservedRunningTime="2026-02-02 17:17:06.097870621 +0000 UTC m=+1617.719474701" Feb 02 17:17:11 crc kubenswrapper[4835]: I0202 17:17:11.112269 4835 generic.go:334] "Generic (PLEG): container finished" podID="dedb5045-87f7-4433-9d73-06e16998ae40" 
containerID="4be4026455e714dc3e0fb11423e89471f8352b6a646a13dc82724d38ff003465" exitCode=0 Feb 02 17:17:11 crc kubenswrapper[4835]: I0202 17:17:11.112410 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" event={"ID":"dedb5045-87f7-4433-9d73-06e16998ae40","Type":"ContainerDied","Data":"4be4026455e714dc3e0fb11423e89471f8352b6a646a13dc82724d38ff003465"} Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.514547 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.634146 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-inventory\") pod \"dedb5045-87f7-4433-9d73-06e16998ae40\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.635177 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-ssh-key-openstack-edpm-ipam\") pod \"dedb5045-87f7-4433-9d73-06e16998ae40\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.635253 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pghzh\" (UniqueName: \"kubernetes.io/projected/dedb5045-87f7-4433-9d73-06e16998ae40-kube-api-access-pghzh\") pod \"dedb5045-87f7-4433-9d73-06e16998ae40\" (UID: \"dedb5045-87f7-4433-9d73-06e16998ae40\") " Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.640593 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dedb5045-87f7-4433-9d73-06e16998ae40-kube-api-access-pghzh" (OuterVolumeSpecName: "kube-api-access-pghzh") pod "dedb5045-87f7-4433-9d73-06e16998ae40" (UID: "dedb5045-87f7-4433-9d73-06e16998ae40"). InnerVolumeSpecName "kube-api-access-pghzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.662748 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-inventory" (OuterVolumeSpecName: "inventory") pod "dedb5045-87f7-4433-9d73-06e16998ae40" (UID: "dedb5045-87f7-4433-9d73-06e16998ae40"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.663347 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "dedb5045-87f7-4433-9d73-06e16998ae40" (UID: "dedb5045-87f7-4433-9d73-06e16998ae40"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.738370 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.738423 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/dedb5045-87f7-4433-9d73-06e16998ae40-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:12 crc kubenswrapper[4835]: I0202 17:17:12.738444 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pghzh\" (UniqueName: \"kubernetes.io/projected/dedb5045-87f7-4433-9d73-06e16998ae40-kube-api-access-pghzh\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.132741 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" event={"ID":"dedb5045-87f7-4433-9d73-06e16998ae40","Type":"ContainerDied","Data":"18da701873ed8ae942a3f265b1a14fcfaab379aeac1dc8300f9fe294bd263da9"} Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.132778 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18da701873ed8ae942a3f265b1a14fcfaab379aeac1dc8300f9fe294bd263da9" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.132783 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.207417 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g"] Feb 02 17:17:13 crc kubenswrapper[4835]: E0202 17:17:13.208099 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dedb5045-87f7-4433-9d73-06e16998ae40" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.208129 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="dedb5045-87f7-4433-9d73-06e16998ae40" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.208320 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="dedb5045-87f7-4433-9d73-06e16998ae40" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.208913 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.210099 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g"] Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.211565 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.211629 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.211854 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.222664 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.249418 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.249484 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.249548 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qj7bd\" (UniqueName: \"kubernetes.io/projected/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-kube-api-access-qj7bd\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.350828 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.351094 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.351215 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qj7bd\" (UniqueName: \"kubernetes.io/projected/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-kube-api-access-qj7bd\") pod 
\"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.354049 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.354058 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.366811 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qj7bd\" (UniqueName: \"kubernetes.io/projected/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-kube-api-access-qj7bd\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z6j8g\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:13 crc kubenswrapper[4835]: I0202 17:17:13.527678 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:14 crc kubenswrapper[4835]: I0202 17:17:14.024419 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g"] Feb 02 17:17:14 crc kubenswrapper[4835]: I0202 17:17:14.142350 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" event={"ID":"a681c4be-8c84-473e-bbc9-b4eeacde7fa4","Type":"ContainerStarted","Data":"f8bc4a4645e4af1d33be3086d58c929c9b4957a235ad5bfb07ed68971ce9b176"} Feb 02 17:17:14 crc kubenswrapper[4835]: I0202 17:17:14.870232 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:17:14 crc kubenswrapper[4835]: I0202 17:17:14.870315 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:17:15 crc kubenswrapper[4835]: I0202 17:17:15.152396 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" event={"ID":"a681c4be-8c84-473e-bbc9-b4eeacde7fa4","Type":"ContainerStarted","Data":"d862a07fd27a783e73b6eae2d6546d7839e2a8535c9dc06c6d14b0e568e71af8"} Feb 02 17:17:15 crc kubenswrapper[4835]: I0202 17:17:15.176584 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" podStartSLOduration=1.707375713 
podStartE2EDuration="2.176569854s" podCreationTimestamp="2026-02-02 17:17:13 +0000 UTC" firstStartedPulling="2026-02-02 17:17:14.018958053 +0000 UTC m=+1625.640562133" lastFinishedPulling="2026-02-02 17:17:14.488152184 +0000 UTC m=+1626.109756274" observedRunningTime="2026-02-02 17:17:15.173804636 +0000 UTC m=+1626.795408756" watchObservedRunningTime="2026-02-02 17:17:15.176569854 +0000 UTC m=+1626.798173934" Feb 02 17:17:44 crc kubenswrapper[4835]: I0202 17:17:44.870597 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:17:44 crc kubenswrapper[4835]: I0202 17:17:44.871096 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:17:46 crc kubenswrapper[4835]: I0202 17:17:46.040185 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-mswm4"] Feb 02 17:17:46 crc kubenswrapper[4835]: I0202 17:17:46.048059 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-mswm4"] Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.041457 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-174f-account-create-update-g7l2c"] Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.053388 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-174f-account-create-update-g7l2c"] Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.064198 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-cce3-account-create-update-6846w"] Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.075434 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-fcqd8"] Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.083980 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-fcqd8"] Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.091133 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-cce3-account-create-update-6846w"] Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.197881 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ff49264-c189-4ebe-88c5-35845a0a5157" path="/var/lib/kubelet/pods/2ff49264-c189-4ebe-88c5-35845a0a5157/volumes" Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.198823 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cfb23ae-a0ae-49be-94f9-b802f11a7b50" path="/var/lib/kubelet/pods/3cfb23ae-a0ae-49be-94f9-b802f11a7b50/volumes" Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.199449 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="479d7565-e26e-40f3-8438-080b9a02d861" path="/var/lib/kubelet/pods/479d7565-e26e-40f3-8438-080b9a02d861/volumes" Feb 02 17:17:47 crc kubenswrapper[4835]: I0202 17:17:47.200047 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7b6b459-99e6-4462-a102-d29647f7b1fd" path="/var/lib/kubelet/pods/a7b6b459-99e6-4462-a102-d29647f7b1fd/volumes" Feb 02 17:17:48 crc kubenswrapper[4835]: 
I0202 17:17:48.026599 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-272e-account-create-update-j8f9f"] Feb 02 17:17:48 crc kubenswrapper[4835]: I0202 17:17:48.036597 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-tpcsd"] Feb 02 17:17:48 crc kubenswrapper[4835]: I0202 17:17:48.046378 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-272e-account-create-update-j8f9f"] Feb 02 17:17:48 crc kubenswrapper[4835]: I0202 17:17:48.053768 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-tpcsd"] Feb 02 17:17:48 crc kubenswrapper[4835]: I0202 17:17:48.416761 4835 generic.go:334] "Generic (PLEG): container finished" podID="a681c4be-8c84-473e-bbc9-b4eeacde7fa4" containerID="d862a07fd27a783e73b6eae2d6546d7839e2a8535c9dc06c6d14b0e568e71af8" exitCode=0 Feb 02 17:17:48 crc kubenswrapper[4835]: I0202 17:17:48.416844 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" event={"ID":"a681c4be-8c84-473e-bbc9-b4eeacde7fa4","Type":"ContainerDied","Data":"d862a07fd27a783e73b6eae2d6546d7839e2a8535c9dc06c6d14b0e568e71af8"} Feb 02 17:17:49 crc kubenswrapper[4835]: I0202 17:17:49.206072 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8aa5f59d-3ddb-4715-ba3d-f15e5503d34b" path="/var/lib/kubelet/pods/8aa5f59d-3ddb-4715-ba3d-f15e5503d34b/volumes" Feb 02 17:17:49 crc kubenswrapper[4835]: I0202 17:17:49.207710 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab0419cc-55bc-4d07-ab78-68d1fb8b639a" path="/var/lib/kubelet/pods/ab0419cc-55bc-4d07-ab78-68d1fb8b639a/volumes" Feb 02 17:17:49 crc kubenswrapper[4835]: I0202 17:17:49.866265 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:49 crc kubenswrapper[4835]: I0202 17:17:49.994811 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-ssh-key-openstack-edpm-ipam\") pod \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " Feb 02 17:17:49 crc kubenswrapper[4835]: I0202 17:17:49.994941 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qj7bd\" (UniqueName: \"kubernetes.io/projected/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-kube-api-access-qj7bd\") pod \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " Feb 02 17:17:49 crc kubenswrapper[4835]: I0202 17:17:49.994994 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-inventory\") pod \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\" (UID: \"a681c4be-8c84-473e-bbc9-b4eeacde7fa4\") " Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.001457 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-kube-api-access-qj7bd" (OuterVolumeSpecName: "kube-api-access-qj7bd") pod "a681c4be-8c84-473e-bbc9-b4eeacde7fa4" (UID: "a681c4be-8c84-473e-bbc9-b4eeacde7fa4"). InnerVolumeSpecName "kube-api-access-qj7bd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.034955 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-inventory" (OuterVolumeSpecName: "inventory") pod "a681c4be-8c84-473e-bbc9-b4eeacde7fa4" (UID: "a681c4be-8c84-473e-bbc9-b4eeacde7fa4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.050758 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "a681c4be-8c84-473e-bbc9-b4eeacde7fa4" (UID: "a681c4be-8c84-473e-bbc9-b4eeacde7fa4"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.097484 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.097540 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qj7bd\" (UniqueName: \"kubernetes.io/projected/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-kube-api-access-qj7bd\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.097550 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a681c4be-8c84-473e-bbc9-b4eeacde7fa4-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.435379 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" event={"ID":"a681c4be-8c84-473e-bbc9-b4eeacde7fa4","Type":"ContainerDied","Data":"f8bc4a4645e4af1d33be3086d58c929c9b4957a235ad5bfb07ed68971ce9b176"} Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.435414 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8bc4a4645e4af1d33be3086d58c929c9b4957a235ad5bfb07ed68971ce9b176" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.435431 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.530160 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd"] Feb 02 17:17:50 crc kubenswrapper[4835]: E0202 17:17:50.530580 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a681c4be-8c84-473e-bbc9-b4eeacde7fa4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.530603 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a681c4be-8c84-473e-bbc9-b4eeacde7fa4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.530773 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a681c4be-8c84-473e-bbc9-b4eeacde7fa4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.531375 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.533615 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.533900 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.534312 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.534472 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.541790 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd"] Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.707614 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq6gz\" (UniqueName: \"kubernetes.io/projected/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-kube-api-access-xq6gz\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.707706 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-ssh-key-openstack-edpm-ipam\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.707988 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.809161 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-ssh-key-openstack-edpm-ipam\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.809301 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.809420 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq6gz\" (UniqueName: 
\"kubernetes.io/projected/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-kube-api-access-xq6gz\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.813008 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-ssh-key-openstack-edpm-ipam\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.825113 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq6gz\" (UniqueName: \"kubernetes.io/projected/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-kube-api-access-xq6gz\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.825622 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:50 crc kubenswrapper[4835]: I0202 17:17:50.879657 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:51 crc kubenswrapper[4835]: I0202 17:17:51.362727 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd"] Feb 02 17:17:51 crc kubenswrapper[4835]: I0202 17:17:51.370169 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:17:51 crc kubenswrapper[4835]: I0202 17:17:51.444149 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" event={"ID":"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8","Type":"ContainerStarted","Data":"772af729d6bec79d1aa90dbdb8fa4d1a2a18d94caf2d90a64b6363e1dd291fa3"} Feb 02 17:17:52 crc kubenswrapper[4835]: I0202 17:17:52.455476 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" event={"ID":"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8","Type":"ContainerStarted","Data":"39e0394980bc7b832551b411c6360de66fa1b01dd16c79a847db902be7ec33db"} Feb 02 17:17:52 crc kubenswrapper[4835]: I0202 17:17:52.478443 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" podStartSLOduration=2.034328108 podStartE2EDuration="2.478414656s" podCreationTimestamp="2026-02-02 17:17:50 +0000 UTC" firstStartedPulling="2026-02-02 17:17:51.369905449 +0000 UTC m=+1662.991509529" lastFinishedPulling="2026-02-02 17:17:51.813991997 +0000 UTC m=+1663.435596077" observedRunningTime="2026-02-02 17:17:52.470611614 +0000 UTC m=+1664.092215714" watchObservedRunningTime="2026-02-02 17:17:52.478414656 +0000 UTC m=+1664.100018766" Feb 02 17:17:56 crc kubenswrapper[4835]: I0202 
17:17:56.498700 4835 generic.go:334] "Generic (PLEG): container finished" podID="0f0a6ea9-aebb-41e7-82f4-d0200894f9a8" containerID="39e0394980bc7b832551b411c6360de66fa1b01dd16c79a847db902be7ec33db" exitCode=0 Feb 02 17:17:56 crc kubenswrapper[4835]: I0202 17:17:56.498783 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" event={"ID":"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8","Type":"ContainerDied","Data":"39e0394980bc7b832551b411c6360de66fa1b01dd16c79a847db902be7ec33db"} Feb 02 17:17:57 crc kubenswrapper[4835]: I0202 17:17:57.991619 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.141671 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq6gz\" (UniqueName: \"kubernetes.io/projected/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-kube-api-access-xq6gz\") pod \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.141732 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-inventory\") pod \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.141872 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-ssh-key-openstack-edpm-ipam\") pod \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\" (UID: \"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8\") " Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.153519 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-kube-api-access-xq6gz" (OuterVolumeSpecName: "kube-api-access-xq6gz") pod "0f0a6ea9-aebb-41e7-82f4-d0200894f9a8" (UID: "0f0a6ea9-aebb-41e7-82f4-d0200894f9a8"). InnerVolumeSpecName "kube-api-access-xq6gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.166084 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0f0a6ea9-aebb-41e7-82f4-d0200894f9a8" (UID: "0f0a6ea9-aebb-41e7-82f4-d0200894f9a8"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.168539 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-inventory" (OuterVolumeSpecName: "inventory") pod "0f0a6ea9-aebb-41e7-82f4-d0200894f9a8" (UID: "0f0a6ea9-aebb-41e7-82f4-d0200894f9a8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.243551 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.243590 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq6gz\" (UniqueName: \"kubernetes.io/projected/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-kube-api-access-xq6gz\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.243602 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.521494 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" event={"ID":"0f0a6ea9-aebb-41e7-82f4-d0200894f9a8","Type":"ContainerDied","Data":"772af729d6bec79d1aa90dbdb8fa4d1a2a18d94caf2d90a64b6363e1dd291fa3"} Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.521561 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="772af729d6bec79d1aa90dbdb8fa4d1a2a18d94caf2d90a64b6363e1dd291fa3" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.521642 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.583128 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv"] Feb 02 17:17:58 crc kubenswrapper[4835]: E0202 17:17:58.583495 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f0a6ea9-aebb-41e7-82f4-d0200894f9a8" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.583522 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f0a6ea9-aebb-41e7-82f4-d0200894f9a8" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.583722 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f0a6ea9-aebb-41e7-82f4-d0200894f9a8" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.584390 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.586291 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.586504 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.587123 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.587387 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.599075 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv"] Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.749512 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.749560 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr5sj\" (UniqueName: \"kubernetes.io/projected/9798ca55-f7f5-4c09-bffc-971c1efe8971-kube-api-access-qr5sj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.749653 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.850933 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.851081 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.851112 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr5sj\" (UniqueName: 
\"kubernetes.io/projected/9798ca55-f7f5-4c09-bffc-971c1efe8971-kube-api-access-qr5sj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.855133 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.860969 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.867960 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr5sj\" (UniqueName: \"kubernetes.io/projected/9798ca55-f7f5-4c09-bffc-971c1efe8971-kube-api-access-qr5sj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:58 crc kubenswrapper[4835]: I0202 17:17:58.899190 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:17:59 crc kubenswrapper[4835]: I0202 17:17:59.416157 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv"] Feb 02 17:17:59 crc kubenswrapper[4835]: I0202 17:17:59.528837 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" event={"ID":"9798ca55-f7f5-4c09-bffc-971c1efe8971","Type":"ContainerStarted","Data":"3550248a05219813c120cb57ebd47334020680cebe00b695f5e457a94c218e00"} Feb 02 17:18:00 crc kubenswrapper[4835]: I0202 17:18:00.536821 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" event={"ID":"9798ca55-f7f5-4c09-bffc-971c1efe8971","Type":"ContainerStarted","Data":"45c1c9ca8209abd4d932a5c5974e0e50a2c9f34ff3c84b4cb9d4920bb0c9c37b"} Feb 02 17:18:00 crc kubenswrapper[4835]: I0202 17:18:00.567977 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" podStartSLOduration=2.058380105 podStartE2EDuration="2.567958933s" podCreationTimestamp="2026-02-02 17:17:58 +0000 UTC" firstStartedPulling="2026-02-02 17:17:59.419632376 +0000 UTC m=+1671.041236456" lastFinishedPulling="2026-02-02 17:17:59.929211204 +0000 UTC m=+1671.550815284" observedRunningTime="2026-02-02 17:18:00.557909788 +0000 UTC m=+1672.179513918" watchObservedRunningTime="2026-02-02 17:18:00.567958933 +0000 UTC m=+1672.189563013" Feb 02 17:18:02 crc kubenswrapper[4835]: I0202 17:18:02.033591 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-zgxpq"] Feb 02 17:18:02 crc kubenswrapper[4835]: I0202 
17:18:02.041563 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-zgxpq"] Feb 02 17:18:03 crc kubenswrapper[4835]: I0202 17:18:03.200928 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d200c16c-b8ab-4f2f-bc1e-56b0a1888c42" path="/var/lib/kubelet/pods/d200c16c-b8ab-4f2f-bc1e-56b0a1888c42/volumes" Feb 02 17:18:09 crc kubenswrapper[4835]: I0202 17:18:09.041930 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-fnpz7"] Feb 02 17:18:09 crc kubenswrapper[4835]: I0202 17:18:09.054147 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-fnpz7"] Feb 02 17:18:09 crc kubenswrapper[4835]: I0202 17:18:09.214779 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db8f91d5-436a-43f3-b131-5594fb4904cb" path="/var/lib/kubelet/pods/db8f91d5-436a-43f3-b131-5594fb4904cb/volumes" Feb 02 17:18:11 crc kubenswrapper[4835]: I0202 17:18:11.192319 4835 scope.go:117] "RemoveContainer" containerID="570151b15315752bf955d300658119d5c2e2df3f9ebc1ee492d9654a2c75cf8e" Feb 02 17:18:11 crc kubenswrapper[4835]: I0202 17:18:11.214677 4835 scope.go:117] "RemoveContainer" containerID="e62c5246d92814a41dd7dbe79d140c7b49283ada25eb31ffb8c267dc78a676b9" Feb 02 17:18:11 crc kubenswrapper[4835]: I0202 17:18:11.258221 4835 scope.go:117] "RemoveContainer" containerID="353e3c150183deee4372a9c1d58c724c99d5e40092c7c324785b26370da1c10c" Feb 02 17:18:11 crc kubenswrapper[4835]: I0202 17:18:11.295466 4835 scope.go:117] "RemoveContainer" containerID="7735dbf567004982e6eb0e6938ada115b76a0a70fa29279d71a560a0975a9693" Feb 02 17:18:11 crc kubenswrapper[4835]: I0202 17:18:11.343122 4835 scope.go:117] "RemoveContainer" containerID="d10a6d22158cb39069ba6ab33b9566ddb04e4f6ac9db1f1800cfb5d96b39aa6c" Feb 02 17:18:11 crc kubenswrapper[4835]: I0202 17:18:11.376580 4835 scope.go:117] "RemoveContainer" containerID="1219a044e414b7d97fea8d4b4f2fc4ba494dbf2bfffe5f90a5336768fcfd95a0" Feb 02 17:18:11 crc kubenswrapper[4835]: I0202 17:18:11.413771 4835 scope.go:117] "RemoveContainer" containerID="40616d7441c01bad5e0813a99af6d05f8fb4618cda6aebdaadbd2e7f3175f629" Feb 02 17:18:11 crc kubenswrapper[4835]: I0202 17:18:11.440086 4835 scope.go:117] "RemoveContainer" containerID="fc29c2ee0efc4e60410c6d6c24afb1b90af415f94a587b79c6b1e48555b9c2df" Feb 02 17:18:14 crc kubenswrapper[4835]: I0202 17:18:14.870563 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:18:14 crc kubenswrapper[4835]: I0202 17:18:14.871408 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:18:14 crc kubenswrapper[4835]: I0202 17:18:14.871472 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:18:14 crc kubenswrapper[4835]: I0202 17:18:14.872337 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:18:14 crc kubenswrapper[4835]: I0202 17:18:14.872409 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" gracePeriod=600 Feb 02 17:18:14 crc kubenswrapper[4835]: E0202 17:18:14.997876 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:18:15 crc kubenswrapper[4835]: I0202 17:18:15.686850 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" exitCode=0 Feb 02 17:18:15 crc kubenswrapper[4835]: I0202 17:18:15.686892 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94"} Feb 02 17:18:15 crc kubenswrapper[4835]: I0202 17:18:15.686936 4835 scope.go:117] "RemoveContainer" containerID="50373b29385bc4901c51a5e8702a6916b31f719329b1f21a631ab633cc9521bd" Feb 02 17:18:15 crc kubenswrapper[4835]: I0202 17:18:15.687603 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:18:15 crc kubenswrapper[4835]: E0202 17:18:15.687903 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.063767 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-vmwqm"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.081334 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-h9ppw"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.097611 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-3d3b-account-create-update-62lns"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.110083 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-rgx9x"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.120859 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-5705-account-create-update-mwnmq"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.133008 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/cinder-db-create-vmwqm"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.144113 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-h9ppw"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.154443 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-rgx9x"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.166828 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-3d3b-account-create-update-62lns"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.176550 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-5705-account-create-update-mwnmq"] Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.197061 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b184729-91fb-4ebc-8ef1-b81a2aebc754" path="/var/lib/kubelet/pods/2b184729-91fb-4ebc-8ef1-b81a2aebc754/volumes" Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.197718 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f983fbe-e05f-4c6a-8759-0d6578726175" path="/var/lib/kubelet/pods/3f983fbe-e05f-4c6a-8759-0d6578726175/volumes" Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.198387 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="628af1b6-f4bb-4235-a14e-6a72d3f40830" path="/var/lib/kubelet/pods/628af1b6-f4bb-4235-a14e-6a72d3f40830/volumes" Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.198917 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0b11f58-4665-4f50-83ec-48cfa18b3499" path="/var/lib/kubelet/pods/a0b11f58-4665-4f50-83ec-48cfa18b3499/volumes" Feb 02 17:18:25 crc kubenswrapper[4835]: I0202 17:18:25.199972 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa7c0b48-4d54-47f6-b862-42c3caeedb80" path="/var/lib/kubelet/pods/fa7c0b48-4d54-47f6-b862-42c3caeedb80/volumes" Feb 02 17:18:27 crc kubenswrapper[4835]: I0202 17:18:27.188589 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:18:27 crc kubenswrapper[4835]: E0202 17:18:27.189159 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:18:29 crc kubenswrapper[4835]: I0202 17:18:29.041415 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-03dc-account-create-update-sn2k6"] Feb 02 17:18:29 crc kubenswrapper[4835]: I0202 17:18:29.056042 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-03dc-account-create-update-sn2k6"] Feb 02 17:18:29 crc kubenswrapper[4835]: I0202 17:18:29.200007 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="879465eb-d451-428b-8f21-d4f47afe9ada" path="/var/lib/kubelet/pods/879465eb-d451-428b-8f21-d4f47afe9ada/volumes" Feb 02 17:18:33 crc kubenswrapper[4835]: I0202 17:18:33.032136 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-krtrd"] Feb 02 17:18:33 crc kubenswrapper[4835]: I0202 17:18:33.041046 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-krtrd"] 
Feb 02 17:18:33 crc kubenswrapper[4835]: I0202 17:18:33.201702 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="360af2ab-a220-45ad-94cf-87415175d269" path="/var/lib/kubelet/pods/360af2ab-a220-45ad-94cf-87415175d269/volumes" Feb 02 17:18:38 crc kubenswrapper[4835]: I0202 17:18:38.189581 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:18:38 crc kubenswrapper[4835]: E0202 17:18:38.190138 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:18:42 crc kubenswrapper[4835]: I0202 17:18:42.929622 4835 generic.go:334] "Generic (PLEG): container finished" podID="9798ca55-f7f5-4c09-bffc-971c1efe8971" containerID="45c1c9ca8209abd4d932a5c5974e0e50a2c9f34ff3c84b4cb9d4920bb0c9c37b" exitCode=0 Feb 02 17:18:42 crc kubenswrapper[4835]: I0202 17:18:42.929719 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" event={"ID":"9798ca55-f7f5-4c09-bffc-971c1efe8971","Type":"ContainerDied","Data":"45c1c9ca8209abd4d932a5c5974e0e50a2c9f34ff3c84b4cb9d4920bb0c9c37b"} Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.346757 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.488730 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr5sj\" (UniqueName: \"kubernetes.io/projected/9798ca55-f7f5-4c09-bffc-971c1efe8971-kube-api-access-qr5sj\") pod \"9798ca55-f7f5-4c09-bffc-971c1efe8971\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.488904 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-ssh-key-openstack-edpm-ipam\") pod \"9798ca55-f7f5-4c09-bffc-971c1efe8971\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.489014 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-inventory\") pod \"9798ca55-f7f5-4c09-bffc-971c1efe8971\" (UID: \"9798ca55-f7f5-4c09-bffc-971c1efe8971\") " Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.495580 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9798ca55-f7f5-4c09-bffc-971c1efe8971-kube-api-access-qr5sj" (OuterVolumeSpecName: "kube-api-access-qr5sj") pod "9798ca55-f7f5-4c09-bffc-971c1efe8971" (UID: "9798ca55-f7f5-4c09-bffc-971c1efe8971"). InnerVolumeSpecName "kube-api-access-qr5sj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.516131 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-inventory" (OuterVolumeSpecName: "inventory") pod "9798ca55-f7f5-4c09-bffc-971c1efe8971" (UID: "9798ca55-f7f5-4c09-bffc-971c1efe8971"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.530041 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "9798ca55-f7f5-4c09-bffc-971c1efe8971" (UID: "9798ca55-f7f5-4c09-bffc-971c1efe8971"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.591517 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.591551 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9798ca55-f7f5-4c09-bffc-971c1efe8971-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.591561 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr5sj\" (UniqueName: \"kubernetes.io/projected/9798ca55-f7f5-4c09-bffc-971c1efe8971-kube-api-access-qr5sj\") on node \"crc\" DevicePath \"\"" Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.947663 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" event={"ID":"9798ca55-f7f5-4c09-bffc-971c1efe8971","Type":"ContainerDied","Data":"3550248a05219813c120cb57ebd47334020680cebe00b695f5e457a94c218e00"} Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.947721 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3550248a05219813c120cb57ebd47334020680cebe00b695f5e457a94c218e00" Feb 02 17:18:44 crc kubenswrapper[4835]: I0202 17:18:44.947721 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.047579 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-rmlcx"] Feb 02 17:18:45 crc kubenswrapper[4835]: E0202 17:18:45.048066 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9798ca55-f7f5-4c09-bffc-971c1efe8971" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.048092 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="9798ca55-f7f5-4c09-bffc-971c1efe8971" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.048291 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="9798ca55-f7f5-4c09-bffc-971c1efe8971" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.048923 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.051401 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.051903 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.052157 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.052434 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.058434 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-rmlcx"] Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.201463 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x2wr\" (UniqueName: \"kubernetes.io/projected/d04cb746-35ad-483f-854e-3c443906f580-kube-api-access-2x2wr\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.201585 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.201643 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.303179 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x2wr\" (UniqueName: \"kubernetes.io/projected/d04cb746-35ad-483f-854e-3c443906f580-kube-api-access-2x2wr\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.303252 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.303298 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc 
kubenswrapper[4835]: I0202 17:18:45.308518 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.309636 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.320495 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x2wr\" (UniqueName: \"kubernetes.io/projected/d04cb746-35ad-483f-854e-3c443906f580-kube-api-access-2x2wr\") pod \"ssh-known-hosts-edpm-deployment-rmlcx\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.365437 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.934332 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-rmlcx"] Feb 02 17:18:45 crc kubenswrapper[4835]: W0202 17:18:45.937217 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd04cb746_35ad_483f_854e_3c443906f580.slice/crio-9e913050f861d46f317c60c0e619dbf46b2307e690d67e3340d99edbba899b09 WatchSource:0}: Error finding container 9e913050f861d46f317c60c0e619dbf46b2307e690d67e3340d99edbba899b09: Status 404 returned error can't find the container with id 9e913050f861d46f317c60c0e619dbf46b2307e690d67e3340d99edbba899b09 Feb 02 17:18:45 crc kubenswrapper[4835]: I0202 17:18:45.958144 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" event={"ID":"d04cb746-35ad-483f-854e-3c443906f580","Type":"ContainerStarted","Data":"9e913050f861d46f317c60c0e619dbf46b2307e690d67e3340d99edbba899b09"} Feb 02 17:18:46 crc kubenswrapper[4835]: I0202 17:18:46.969744 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" event={"ID":"d04cb746-35ad-483f-854e-3c443906f580","Type":"ContainerStarted","Data":"29af561a87a222d4353e2e6ca0de8bca62f286233bfce597173b45302b625f3c"} Feb 02 17:18:46 crc kubenswrapper[4835]: I0202 17:18:46.990701 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" podStartSLOduration=1.449829875 podStartE2EDuration="1.990683174s" podCreationTimestamp="2026-02-02 17:18:45 +0000 UTC" firstStartedPulling="2026-02-02 17:18:45.939222445 +0000 UTC m=+1717.560826525" lastFinishedPulling="2026-02-02 17:18:46.480075744 +0000 UTC m=+1718.101679824" observedRunningTime="2026-02-02 17:18:46.988125041 +0000 UTC m=+1718.609729161" watchObservedRunningTime="2026-02-02 17:18:46.990683174 +0000 UTC m=+1718.612287254" Feb 02 17:18:52 crc kubenswrapper[4835]: I0202 17:18:52.189298 4835 scope.go:117] "RemoveContainer" 
containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:18:52 crc kubenswrapper[4835]: E0202 17:18:52.189899 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:18:53 crc kubenswrapper[4835]: I0202 17:18:53.027815 4835 generic.go:334] "Generic (PLEG): container finished" podID="d04cb746-35ad-483f-854e-3c443906f580" containerID="29af561a87a222d4353e2e6ca0de8bca62f286233bfce597173b45302b625f3c" exitCode=0 Feb 02 17:18:53 crc kubenswrapper[4835]: I0202 17:18:53.027944 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" event={"ID":"d04cb746-35ad-483f-854e-3c443906f580","Type":"ContainerDied","Data":"29af561a87a222d4353e2e6ca0de8bca62f286233bfce597173b45302b625f3c"} Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.480432 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.586129 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-inventory-0\") pod \"d04cb746-35ad-483f-854e-3c443906f580\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.586278 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x2wr\" (UniqueName: \"kubernetes.io/projected/d04cb746-35ad-483f-854e-3c443906f580-kube-api-access-2x2wr\") pod \"d04cb746-35ad-483f-854e-3c443906f580\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.586343 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-ssh-key-openstack-edpm-ipam\") pod \"d04cb746-35ad-483f-854e-3c443906f580\" (UID: \"d04cb746-35ad-483f-854e-3c443906f580\") " Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.591279 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d04cb746-35ad-483f-854e-3c443906f580-kube-api-access-2x2wr" (OuterVolumeSpecName: "kube-api-access-2x2wr") pod "d04cb746-35ad-483f-854e-3c443906f580" (UID: "d04cb746-35ad-483f-854e-3c443906f580"). InnerVolumeSpecName "kube-api-access-2x2wr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.618217 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d04cb746-35ad-483f-854e-3c443906f580" (UID: "d04cb746-35ad-483f-854e-3c443906f580"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.618646 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "d04cb746-35ad-483f-854e-3c443906f580" (UID: "d04cb746-35ad-483f-854e-3c443906f580"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.688697 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x2wr\" (UniqueName: \"kubernetes.io/projected/d04cb746-35ad-483f-854e-3c443906f580-kube-api-access-2x2wr\") on node \"crc\" DevicePath \"\"" Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.688749 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:18:54 crc kubenswrapper[4835]: I0202 17:18:54.688767 4835 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d04cb746-35ad-483f-854e-3c443906f580-inventory-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.047778 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" event={"ID":"d04cb746-35ad-483f-854e-3c443906f580","Type":"ContainerDied","Data":"9e913050f861d46f317c60c0e619dbf46b2307e690d67e3340d99edbba899b09"} Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.047818 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e913050f861d46f317c60c0e619dbf46b2307e690d67e3340d99edbba899b09" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.047894 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-rmlcx" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.120108 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49"] Feb 02 17:18:55 crc kubenswrapper[4835]: E0202 17:18:55.120590 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d04cb746-35ad-483f-854e-3c443906f580" containerName="ssh-known-hosts-edpm-deployment" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.120614 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d04cb746-35ad-483f-854e-3c443906f580" containerName="ssh-known-hosts-edpm-deployment" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.120831 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="d04cb746-35ad-483f-854e-3c443906f580" containerName="ssh-known-hosts-edpm-deployment" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.121591 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.123763 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.124007 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.124031 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.124246 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.137029 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49"] Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.198636 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.198923 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.199077 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg22d\" (UniqueName: \"kubernetes.io/projected/08bb0d31-52f5-4663-8331-847545aa8021-kube-api-access-lg22d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.300755 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.300833 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg22d\" (UniqueName: \"kubernetes.io/projected/08bb0d31-52f5-4663-8331-847545aa8021-kube-api-access-lg22d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.300869 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-ssh-key-openstack-edpm-ipam\") pod 
\"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.313012 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.315364 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.318573 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg22d\" (UniqueName: \"kubernetes.io/projected/08bb0d31-52f5-4663-8331-847545aa8021-kube-api-access-lg22d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-srk49\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.440503 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:18:55 crc kubenswrapper[4835]: I0202 17:18:55.966504 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49"] Feb 02 17:18:56 crc kubenswrapper[4835]: I0202 17:18:56.056297 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" event={"ID":"08bb0d31-52f5-4663-8331-847545aa8021","Type":"ContainerStarted","Data":"df64e19327f4ecbdc69dc0d4e8422537b748a43507d58da1a9ab2fdc338f5f92"} Feb 02 17:18:58 crc kubenswrapper[4835]: I0202 17:18:58.074391 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" event={"ID":"08bb0d31-52f5-4663-8331-847545aa8021","Type":"ContainerStarted","Data":"f5a433a469eb0c69a8526b8b4bee40ecf4d6ce95867bf3d5a3fcca4f8fb2d0ab"} Feb 02 17:18:58 crc kubenswrapper[4835]: I0202 17:18:58.088631 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" podStartSLOduration=2.173689595 podStartE2EDuration="3.088614558s" podCreationTimestamp="2026-02-02 17:18:55 +0000 UTC" firstStartedPulling="2026-02-02 17:18:55.975913571 +0000 UTC m=+1727.597517651" lastFinishedPulling="2026-02-02 17:18:56.890838514 +0000 UTC m=+1728.512442614" observedRunningTime="2026-02-02 17:18:58.086513548 +0000 UTC m=+1729.708117648" watchObservedRunningTime="2026-02-02 17:18:58.088614558 +0000 UTC m=+1729.710218638" Feb 02 17:18:59 crc kubenswrapper[4835]: I0202 17:18:59.041641 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-5nshn"] Feb 02 17:18:59 crc kubenswrapper[4835]: I0202 17:18:59.050531 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-5nshn"] Feb 02 17:18:59 crc kubenswrapper[4835]: I0202 17:18:59.198522 4835 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b04049d-f2c7-4368-969b-4b5d1d4628b8" path="/var/lib/kubelet/pods/5b04049d-f2c7-4368-969b-4b5d1d4628b8/volumes" Feb 02 17:19:01 crc kubenswrapper[4835]: I0202 17:19:01.039010 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-nhtch"] Feb 02 17:19:01 crc kubenswrapper[4835]: I0202 17:19:01.050133 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-nhtch"] Feb 02 17:19:01 crc kubenswrapper[4835]: I0202 17:19:01.203449 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08cf9281-9a97-420d-b734-735a7975dfe9" path="/var/lib/kubelet/pods/08cf9281-9a97-420d-b734-735a7975dfe9/volumes" Feb 02 17:19:03 crc kubenswrapper[4835]: I0202 17:19:03.032926 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-bgpns"] Feb 02 17:19:03 crc kubenswrapper[4835]: I0202 17:19:03.063938 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-bgpns"] Feb 02 17:19:03 crc kubenswrapper[4835]: I0202 17:19:03.198755 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bbf2f76-2a57-4df0-989c-3a55710ef86c" path="/var/lib/kubelet/pods/6bbf2f76-2a57-4df0-989c-3a55710ef86c/volumes" Feb 02 17:19:04 crc kubenswrapper[4835]: I0202 17:19:04.139561 4835 generic.go:334] "Generic (PLEG): container finished" podID="08bb0d31-52f5-4663-8331-847545aa8021" containerID="f5a433a469eb0c69a8526b8b4bee40ecf4d6ce95867bf3d5a3fcca4f8fb2d0ab" exitCode=0 Feb 02 17:19:04 crc kubenswrapper[4835]: I0202 17:19:04.139620 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" event={"ID":"08bb0d31-52f5-4663-8331-847545aa8021","Type":"ContainerDied","Data":"f5a433a469eb0c69a8526b8b4bee40ecf4d6ce95867bf3d5a3fcca4f8fb2d0ab"} Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.569986 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.697909 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-inventory\") pod \"08bb0d31-52f5-4663-8331-847545aa8021\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.698003 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg22d\" (UniqueName: \"kubernetes.io/projected/08bb0d31-52f5-4663-8331-847545aa8021-kube-api-access-lg22d\") pod \"08bb0d31-52f5-4663-8331-847545aa8021\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.698081 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-ssh-key-openstack-edpm-ipam\") pod \"08bb0d31-52f5-4663-8331-847545aa8021\" (UID: \"08bb0d31-52f5-4663-8331-847545aa8021\") " Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.706561 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08bb0d31-52f5-4663-8331-847545aa8021-kube-api-access-lg22d" (OuterVolumeSpecName: "kube-api-access-lg22d") pod "08bb0d31-52f5-4663-8331-847545aa8021" (UID: "08bb0d31-52f5-4663-8331-847545aa8021"). InnerVolumeSpecName "kube-api-access-lg22d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.729991 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "08bb0d31-52f5-4663-8331-847545aa8021" (UID: "08bb0d31-52f5-4663-8331-847545aa8021"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.738700 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-inventory" (OuterVolumeSpecName: "inventory") pod "08bb0d31-52f5-4663-8331-847545aa8021" (UID: "08bb0d31-52f5-4663-8331-847545aa8021"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.799965 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.800126 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg22d\" (UniqueName: \"kubernetes.io/projected/08bb0d31-52f5-4663-8331-847545aa8021-kube-api-access-lg22d\") on node \"crc\" DevicePath \"\"" Feb 02 17:19:05 crc kubenswrapper[4835]: I0202 17:19:05.800138 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/08bb0d31-52f5-4663-8331-847545aa8021-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.034664 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7w9z6"] Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.042625 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7w9z6"] Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.156685 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" event={"ID":"08bb0d31-52f5-4663-8331-847545aa8021","Type":"ContainerDied","Data":"df64e19327f4ecbdc69dc0d4e8422537b748a43507d58da1a9ab2fdc338f5f92"} Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.156725 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.156738 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df64e19327f4ecbdc69dc0d4e8422537b748a43507d58da1a9ab2fdc338f5f92" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.284683 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6"] Feb 02 17:19:06 crc kubenswrapper[4835]: E0202 17:19:06.285103 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08bb0d31-52f5-4663-8331-847545aa8021" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.285122 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="08bb0d31-52f5-4663-8331-847545aa8021" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.285316 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="08bb0d31-52f5-4663-8331-847545aa8021" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.285890 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.289868 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.290175 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.290391 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.290525 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.294387 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6"] Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.412158 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.412229 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.412285 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qhnb\" (UniqueName: \"kubernetes.io/projected/2b809273-7fb1-4ca6-a2f6-a65dae67678f-kube-api-access-6qhnb\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.514851 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.514944 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.515019 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qhnb\" (UniqueName: \"kubernetes.io/projected/2b809273-7fb1-4ca6-a2f6-a65dae67678f-kube-api-access-6qhnb\") pod 
\"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.527058 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.527060 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.544857 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qhnb\" (UniqueName: \"kubernetes.io/projected/2b809273-7fb1-4ca6-a2f6-a65dae67678f-kube-api-access-6qhnb\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:06 crc kubenswrapper[4835]: I0202 17:19:06.601151 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:07 crc kubenswrapper[4835]: I0202 17:19:07.193438 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:19:07 crc kubenswrapper[4835]: E0202 17:19:07.195235 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:19:07 crc kubenswrapper[4835]: I0202 17:19:07.221711 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4534fed-f27c-4656-b496-cec6f87d9915" path="/var/lib/kubelet/pods/c4534fed-f27c-4656-b496-cec6f87d9915/volumes" Feb 02 17:19:07 crc kubenswrapper[4835]: I0202 17:19:07.222654 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6"] Feb 02 17:19:08 crc kubenswrapper[4835]: I0202 17:19:08.175535 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" event={"ID":"2b809273-7fb1-4ca6-a2f6-a65dae67678f","Type":"ContainerStarted","Data":"c9ef10faeffd827c049817fc26193697ad1783dd921248d788b65807bb125be7"} Feb 02 17:19:08 crc kubenswrapper[4835]: I0202 17:19:08.175888 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" event={"ID":"2b809273-7fb1-4ca6-a2f6-a65dae67678f","Type":"ContainerStarted","Data":"15ddab9e9cff504f2be59d3dda0aec2c5be0daf6d634e8fca0197e3b65af2888"} Feb 02 17:19:08 crc kubenswrapper[4835]: I0202 17:19:08.205285 4835 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" podStartSLOduration=1.749778909 podStartE2EDuration="2.205244814s" podCreationTimestamp="2026-02-02 17:19:06 +0000 UTC" firstStartedPulling="2026-02-02 17:19:07.1987084 +0000 UTC m=+1738.820312480" lastFinishedPulling="2026-02-02 17:19:07.654174305 +0000 UTC m=+1739.275778385" observedRunningTime="2026-02-02 17:19:08.196604238 +0000 UTC m=+1739.818208338" watchObservedRunningTime="2026-02-02 17:19:08.205244814 +0000 UTC m=+1739.826848894" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.615512 4835 scope.go:117] "RemoveContainer" containerID="3c78a6ca02fca1ebd6ed30ff1e6a7bc98054b1686135fee3ecc37f6eab2a2186" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.660520 4835 scope.go:117] "RemoveContainer" containerID="b83db10bd2169b0573e8cadb9407af9455a2182698097be42f06879c8e3ecba7" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.688869 4835 scope.go:117] "RemoveContainer" containerID="a20068a59cd5fffb027c45bb7ebbe3177ad9eb0bf1f20f1e0d4f9c81daaeaf02" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.735091 4835 scope.go:117] "RemoveContainer" containerID="5685c3891e165f448507a9f1a25d9802b4ae076323754161deb494c7573ff008" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.769872 4835 scope.go:117] "RemoveContainer" containerID="dc3360bcb7da5b66db4f6628de0e7ae9395bde1426cf06961a947c59b9908940" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.812805 4835 scope.go:117] "RemoveContainer" containerID="421cb20ec708a4903c8a9c761652d6b6ca91e86ab8153782a608d5dd4973fdf6" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.847803 4835 scope.go:117] "RemoveContainer" containerID="6c2502d5dfb6d2e0a339e33ccb89f4fa7bcd4887d1335c82fdf1399ea07a370f" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.869010 4835 scope.go:117] "RemoveContainer" containerID="31f44bb6e34fca83d5d21668588deee6a142c2ef364147899a00db58828e5e82" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.901897 4835 scope.go:117] "RemoveContainer" containerID="1ae3daae2474db250f31442d1e93bc0da7047890afe782281e64ce5eeb0a2ca4" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.926836 4835 scope.go:117] "RemoveContainer" containerID="7057d6711b021ccf6b0f56839384edc2db6d7aea71b87961dfba1e54e94f5a0b" Feb 02 17:19:11 crc kubenswrapper[4835]: I0202 17:19:11.953103 4835 scope.go:117] "RemoveContainer" containerID="7bffce90b5ea58660419802f3741f27e83986bad2e33b2133aad22d7bc39ec47" Feb 02 17:19:17 crc kubenswrapper[4835]: I0202 17:19:17.261051 4835 generic.go:334] "Generic (PLEG): container finished" podID="2b809273-7fb1-4ca6-a2f6-a65dae67678f" containerID="c9ef10faeffd827c049817fc26193697ad1783dd921248d788b65807bb125be7" exitCode=0 Feb 02 17:19:17 crc kubenswrapper[4835]: I0202 17:19:17.261147 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" event={"ID":"2b809273-7fb1-4ca6-a2f6-a65dae67678f","Type":"ContainerDied","Data":"c9ef10faeffd827c049817fc26193697ad1783dd921248d788b65807bb125be7"} Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.709498 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.856596 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qhnb\" (UniqueName: \"kubernetes.io/projected/2b809273-7fb1-4ca6-a2f6-a65dae67678f-kube-api-access-6qhnb\") pod \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.856689 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-inventory\") pod \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.856796 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-ssh-key-openstack-edpm-ipam\") pod \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\" (UID: \"2b809273-7fb1-4ca6-a2f6-a65dae67678f\") " Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.862613 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b809273-7fb1-4ca6-a2f6-a65dae67678f-kube-api-access-6qhnb" (OuterVolumeSpecName: "kube-api-access-6qhnb") pod "2b809273-7fb1-4ca6-a2f6-a65dae67678f" (UID: "2b809273-7fb1-4ca6-a2f6-a65dae67678f"). InnerVolumeSpecName "kube-api-access-6qhnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.889871 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-inventory" (OuterVolumeSpecName: "inventory") pod "2b809273-7fb1-4ca6-a2f6-a65dae67678f" (UID: "2b809273-7fb1-4ca6-a2f6-a65dae67678f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.891532 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "2b809273-7fb1-4ca6-a2f6-a65dae67678f" (UID: "2b809273-7fb1-4ca6-a2f6-a65dae67678f"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.958996 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qhnb\" (UniqueName: \"kubernetes.io/projected/2b809273-7fb1-4ca6-a2f6-a65dae67678f-kube-api-access-6qhnb\") on node \"crc\" DevicePath \"\"" Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.959040 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:19:18 crc kubenswrapper[4835]: I0202 17:19:18.959055 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2b809273-7fb1-4ca6-a2f6-a65dae67678f-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:19:19 crc kubenswrapper[4835]: I0202 17:19:19.279618 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" event={"ID":"2b809273-7fb1-4ca6-a2f6-a65dae67678f","Type":"ContainerDied","Data":"15ddab9e9cff504f2be59d3dda0aec2c5be0daf6d634e8fca0197e3b65af2888"} Feb 02 17:19:19 crc kubenswrapper[4835]: I0202 17:19:19.279987 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15ddab9e9cff504f2be59d3dda0aec2c5be0daf6d634e8fca0197e3b65af2888" Feb 02 17:19:19 crc kubenswrapper[4835]: I0202 17:19:19.279781 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6" Feb 02 17:19:20 crc kubenswrapper[4835]: I0202 17:19:20.189269 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:19:20 crc kubenswrapper[4835]: E0202 17:19:20.189679 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:19:21 crc kubenswrapper[4835]: I0202 17:19:21.042561 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-65fpg"] Feb 02 17:19:21 crc kubenswrapper[4835]: I0202 17:19:21.055041 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-65fpg"] Feb 02 17:19:21 crc kubenswrapper[4835]: I0202 17:19:21.209509 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a6ab880-bf41-45c8-a66c-d096cf3d6eb9" path="/var/lib/kubelet/pods/7a6ab880-bf41-45c8-a66c-d096cf3d6eb9/volumes" Feb 02 17:19:34 crc kubenswrapper[4835]: I0202 17:19:34.189571 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:19:34 crc kubenswrapper[4835]: E0202 17:19:34.190850 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" 
Feb 02 17:19:45 crc kubenswrapper[4835]: I0202 17:19:45.191437 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:19:45 crc kubenswrapper[4835]: E0202 17:19:45.192724 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:19:54 crc kubenswrapper[4835]: I0202 17:19:54.057201 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-d4cr7"] Feb 02 17:19:54 crc kubenswrapper[4835]: I0202 17:19:54.066457 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-d4cr7"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.033285 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-gcjzv"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.048479 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-5861-account-create-update-8fgpf"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.065008 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-d089-account-create-update-vrbcv"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.076150 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-bm5gm"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.084294 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-a3b3-account-create-update-dq7zp"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.091778 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-gcjzv"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.100042 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-d089-account-create-update-vrbcv"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.108295 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-5861-account-create-update-8fgpf"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.117336 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-bm5gm"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.126607 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-a3b3-account-create-update-dq7zp"] Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.204256 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="118439ce-dfb4-462c-91f5-c989b2f82f1b" path="/var/lib/kubelet/pods/118439ce-dfb4-462c-91f5-c989b2f82f1b/volumes" Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.205267 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="233822fd-9e0a-4c0c-8591-0fce2284f28c" path="/var/lib/kubelet/pods/233822fd-9e0a-4c0c-8591-0fce2284f28c/volumes" Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.206150 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="732f4945-296c-4365-8854-d4633be82d41" path="/var/lib/kubelet/pods/732f4945-296c-4365-8854-d4633be82d41/volumes" Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.207337 4835 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="a836f890-a488-4781-bafc-1e8a3b91f0a7" path="/var/lib/kubelet/pods/a836f890-a488-4781-bafc-1e8a3b91f0a7/volumes" Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.208787 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b157515a-ef65-4c3c-9eb1-b015cf54a845" path="/var/lib/kubelet/pods/b157515a-ef65-4c3c-9eb1-b015cf54a845/volumes" Feb 02 17:19:55 crc kubenswrapper[4835]: I0202 17:19:55.209572 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee5b5017-e8e0-40e3-b535-188b1443458e" path="/var/lib/kubelet/pods/ee5b5017-e8e0-40e3-b535-188b1443458e/volumes" Feb 02 17:20:01 crc kubenswrapper[4835]: I0202 17:20:01.190113 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:20:01 crc kubenswrapper[4835]: E0202 17:20:01.190872 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:20:12 crc kubenswrapper[4835]: I0202 17:20:12.133660 4835 scope.go:117] "RemoveContainer" containerID="c8b67bd71c4cf201081f3260954dd1d767d34ca3dc421682692a7545df7af09c" Feb 02 17:20:12 crc kubenswrapper[4835]: I0202 17:20:12.172568 4835 scope.go:117] "RemoveContainer" containerID="6f383e83e9c08d5517a64d1f621ca5dbd8c9f4345cc44aa913cc65375192b2f2" Feb 02 17:20:12 crc kubenswrapper[4835]: I0202 17:20:12.233562 4835 scope.go:117] "RemoveContainer" containerID="d839df2d790a94415c72bcdd9b354b2987cbf0b81567df88bb8ba7c7a742120c" Feb 02 17:20:12 crc kubenswrapper[4835]: I0202 17:20:12.293658 4835 scope.go:117] "RemoveContainer" containerID="30e57ba54d75acf1d00b0a4c3cc76a41faa518c5165fd00ee6ba3607e2146b54" Feb 02 17:20:12 crc kubenswrapper[4835]: I0202 17:20:12.341085 4835 scope.go:117] "RemoveContainer" containerID="3bcc0f49b56c828ba04d5a7e46d43b4dc4f7a06648dc289ed0c0d97d3abbc0c5" Feb 02 17:20:12 crc kubenswrapper[4835]: I0202 17:20:12.378962 4835 scope.go:117] "RemoveContainer" containerID="a4143ddc1f2c674d003865b6b2d5c585b01901dd5af9065a28d023d5f477bf67" Feb 02 17:20:12 crc kubenswrapper[4835]: I0202 17:20:12.420171 4835 scope.go:117] "RemoveContainer" containerID="3d26f0bce92def978696edee263ee01d46d72d4c212ae7facbd778e8bab9df30" Feb 02 17:20:13 crc kubenswrapper[4835]: I0202 17:20:13.193026 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:20:13 crc kubenswrapper[4835]: E0202 17:20:13.195066 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:20:21 crc kubenswrapper[4835]: I0202 17:20:21.060243 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l5fts"] Feb 02 17:20:21 crc kubenswrapper[4835]: I0202 17:20:21.073406 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell0-conductor-db-sync-l5fts"] Feb 02 17:20:21 crc kubenswrapper[4835]: I0202 17:20:21.201462 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50e803e5-2a0b-4a50-8644-fa079b131ee5" path="/var/lib/kubelet/pods/50e803e5-2a0b-4a50-8644-fa079b131ee5/volumes" Feb 02 17:20:25 crc kubenswrapper[4835]: I0202 17:20:25.189120 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:20:25 crc kubenswrapper[4835]: E0202 17:20:25.189615 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:20:37 crc kubenswrapper[4835]: I0202 17:20:37.189335 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:20:37 crc kubenswrapper[4835]: E0202 17:20:37.190390 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:20:39 crc kubenswrapper[4835]: I0202 17:20:39.031912 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-bftbv"] Feb 02 17:20:39 crc kubenswrapper[4835]: I0202 17:20:39.039650 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-bftbv"] Feb 02 17:20:39 crc kubenswrapper[4835]: I0202 17:20:39.199520 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9faacb79-efaa-411d-9d65-23b6b602b4d2" path="/var/lib/kubelet/pods/9faacb79-efaa-411d-9d65-23b6b602b4d2/volumes" Feb 02 17:20:41 crc kubenswrapper[4835]: I0202 17:20:41.055050 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wtzmt"] Feb 02 17:20:41 crc kubenswrapper[4835]: I0202 17:20:41.066414 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-wtzmt"] Feb 02 17:20:41 crc kubenswrapper[4835]: I0202 17:20:41.203410 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97d1d3dc-e7a1-4a15-b586-a285bcc6cff6" path="/var/lib/kubelet/pods/97d1d3dc-e7a1-4a15-b586-a285bcc6cff6/volumes" Feb 02 17:20:52 crc kubenswrapper[4835]: I0202 17:20:52.189238 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:20:52 crc kubenswrapper[4835]: E0202 17:20:52.190265 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:21:04 crc kubenswrapper[4835]: I0202 17:21:04.188891 
4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:21:04 crc kubenswrapper[4835]: E0202 17:21:04.189639 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:21:12 crc kubenswrapper[4835]: I0202 17:21:12.552147 4835 scope.go:117] "RemoveContainer" containerID="42d0cb082a50cb0d6c429ebb9f06a478f7a888aef9d9c712877d6e7a72c670a7" Feb 02 17:21:12 crc kubenswrapper[4835]: I0202 17:21:12.593717 4835 scope.go:117] "RemoveContainer" containerID="fb30dfe4f380c1a85fdce7452c1c6efad60727f79f3d6b27b8d5404fb145351d" Feb 02 17:21:12 crc kubenswrapper[4835]: I0202 17:21:12.632498 4835 scope.go:117] "RemoveContainer" containerID="575b04fb6bd5a6e55cee6452abc9e43a2d4bf78fbe63f1332736ac695d8477ed" Feb 02 17:21:16 crc kubenswrapper[4835]: I0202 17:21:16.190700 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:21:16 crc kubenswrapper[4835]: E0202 17:21:16.194568 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:21:25 crc kubenswrapper[4835]: I0202 17:21:25.049422 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-f44hv"] Feb 02 17:21:25 crc kubenswrapper[4835]: I0202 17:21:25.058485 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-f44hv"] Feb 02 17:21:25 crc kubenswrapper[4835]: I0202 17:21:25.200965 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b693642-0cee-4d02-b938-4a1fc245e8a0" path="/var/lib/kubelet/pods/7b693642-0cee-4d02-b938-4a1fc245e8a0/volumes" Feb 02 17:21:28 crc kubenswrapper[4835]: I0202 17:21:28.188703 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:21:28 crc kubenswrapper[4835]: E0202 17:21:28.189186 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:21:40 crc kubenswrapper[4835]: I0202 17:21:40.188633 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:21:40 crc kubenswrapper[4835]: E0202 17:21:40.190398 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:21:54 crc kubenswrapper[4835]: I0202 17:21:54.188763 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:21:54 crc kubenswrapper[4835]: E0202 17:21:54.189588 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:22:08 crc kubenswrapper[4835]: I0202 17:22:08.189791 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:22:08 crc kubenswrapper[4835]: E0202 17:22:08.190614 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:22:12 crc kubenswrapper[4835]: I0202 17:22:12.738244 4835 scope.go:117] "RemoveContainer" containerID="4aa4ff3b7299038323b1660bf7af78493b749ca57ee87fb124941fd7b4597032" Feb 02 17:22:23 crc kubenswrapper[4835]: I0202 17:22:23.188839 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:22:23 crc kubenswrapper[4835]: E0202 17:22:23.189784 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:22:36 crc kubenswrapper[4835]: I0202 17:22:36.188532 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:22:36 crc kubenswrapper[4835]: E0202 17:22:36.189479 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:22:50 crc kubenswrapper[4835]: I0202 17:22:50.188678 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:22:50 crc kubenswrapper[4835]: E0202 17:22:50.189476 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:22:55 crc kubenswrapper[4835]: I0202 17:22:55.961796 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rm94s"] Feb 02 17:22:55 crc kubenswrapper[4835]: E0202 17:22:55.965336 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b809273-7fb1-4ca6-a2f6-a65dae67678f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:22:55 crc kubenswrapper[4835]: I0202 17:22:55.965364 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b809273-7fb1-4ca6-a2f6-a65dae67678f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:22:55 crc kubenswrapper[4835]: I0202 17:22:55.965592 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b809273-7fb1-4ca6-a2f6-a65dae67678f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:22:55 crc kubenswrapper[4835]: I0202 17:22:55.967096 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:55 crc kubenswrapper[4835]: I0202 17:22:55.978701 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rm94s"] Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.147438 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-catalog-content\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.147517 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-utilities\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.147607 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bdlx\" (UniqueName: \"kubernetes.io/projected/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-kube-api-access-9bdlx\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.249874 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-catalog-content\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.249972 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-utilities\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.250052 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-9bdlx\" (UniqueName: \"kubernetes.io/projected/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-kube-api-access-9bdlx\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.250775 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-catalog-content\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.250811 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-utilities\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.271986 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bdlx\" (UniqueName: \"kubernetes.io/projected/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-kube-api-access-9bdlx\") pod \"redhat-operators-rm94s\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.288763 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:22:56 crc kubenswrapper[4835]: I0202 17:22:56.757720 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rm94s"] Feb 02 17:22:56 crc kubenswrapper[4835]: W0202 17:22:56.765099 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9d9c767_3ef6_48f2_b9d3_925c673f2df9.slice/crio-0e414e232605209ead25f3e69dec1a8bda856c69715b6fc03b7327f68a83ad04 WatchSource:0}: Error finding container 0e414e232605209ead25f3e69dec1a8bda856c69715b6fc03b7327f68a83ad04: Status 404 returned error can't find the container with id 0e414e232605209ead25f3e69dec1a8bda856c69715b6fc03b7327f68a83ad04 Feb 02 17:22:57 crc kubenswrapper[4835]: I0202 17:22:57.542545 4835 generic.go:334] "Generic (PLEG): container finished" podID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerID="86525c43aec9b9863d877e48ef1f99dfff0e585f03b07143af5d2bdaf00c742d" exitCode=0 Feb 02 17:22:57 crc kubenswrapper[4835]: I0202 17:22:57.542640 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm94s" event={"ID":"d9d9c767-3ef6-48f2-b9d3-925c673f2df9","Type":"ContainerDied","Data":"86525c43aec9b9863d877e48ef1f99dfff0e585f03b07143af5d2bdaf00c742d"} Feb 02 17:22:57 crc kubenswrapper[4835]: I0202 17:22:57.542859 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm94s" event={"ID":"d9d9c767-3ef6-48f2-b9d3-925c673f2df9","Type":"ContainerStarted","Data":"0e414e232605209ead25f3e69dec1a8bda856c69715b6fc03b7327f68a83ad04"} Feb 02 17:22:57 crc kubenswrapper[4835]: I0202 17:22:57.544125 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:22:58 crc kubenswrapper[4835]: I0202 17:22:58.555295 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-rm94s" event={"ID":"d9d9c767-3ef6-48f2-b9d3-925c673f2df9","Type":"ContainerStarted","Data":"d3ed139aeb68d989a03371b67a4e10d31f2834bd1abfc16bbdca994df1d105d1"} Feb 02 17:22:59 crc kubenswrapper[4835]: I0202 17:22:59.569921 4835 generic.go:334] "Generic (PLEG): container finished" podID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerID="d3ed139aeb68d989a03371b67a4e10d31f2834bd1abfc16bbdca994df1d105d1" exitCode=0 Feb 02 17:22:59 crc kubenswrapper[4835]: I0202 17:22:59.569981 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm94s" event={"ID":"d9d9c767-3ef6-48f2-b9d3-925c673f2df9","Type":"ContainerDied","Data":"d3ed139aeb68d989a03371b67a4e10d31f2834bd1abfc16bbdca994df1d105d1"} Feb 02 17:23:00 crc kubenswrapper[4835]: I0202 17:23:00.580338 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm94s" event={"ID":"d9d9c767-3ef6-48f2-b9d3-925c673f2df9","Type":"ContainerStarted","Data":"1e402c58268688a7909816154ababeebc4876e595899791c26430936f5788bce"} Feb 02 17:23:00 crc kubenswrapper[4835]: I0202 17:23:00.603898 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rm94s" podStartSLOduration=3.152020245 podStartE2EDuration="5.603879886s" podCreationTimestamp="2026-02-02 17:22:55 +0000 UTC" firstStartedPulling="2026-02-02 17:22:57.543934406 +0000 UTC m=+1969.165538476" lastFinishedPulling="2026-02-02 17:22:59.995794017 +0000 UTC m=+1971.617398117" observedRunningTime="2026-02-02 17:23:00.600552141 +0000 UTC m=+1972.222156261" watchObservedRunningTime="2026-02-02 17:23:00.603879886 +0000 UTC m=+1972.225483966" Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.189411 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:23:04 crc kubenswrapper[4835]: E0202 17:23:04.190506 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.777867 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.794881 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6xpg9"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.804045 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.812731 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-rmlcx"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.821703 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.828415 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd"] Feb 02 17:23:04 crc 
kubenswrapper[4835]: I0202 17:23:04.836589 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.843711 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.849685 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.855775 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.861489 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cbxd6"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.867394 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-pswrd"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.873562 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.879331 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hl9qv"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.885844 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-rmlcx"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.893929 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z6j8g"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.901774 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-srk49"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.908920 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-9dvqq"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.916060 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzdxz"] Feb 02 17:23:04 crc kubenswrapper[4835]: I0202 17:23:04.926362 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t2kql"] Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.200338 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08bb0d31-52f5-4663-8331-847545aa8021" path="/var/lib/kubelet/pods/08bb0d31-52f5-4663-8331-847545aa8021/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.200976 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f0a6ea9-aebb-41e7-82f4-d0200894f9a8" path="/var/lib/kubelet/pods/0f0a6ea9-aebb-41e7-82f4-d0200894f9a8/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.201467 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12e6900a-36b2-4110-8f06-d37236112c63" path="/var/lib/kubelet/pods/12e6900a-36b2-4110-8f06-d37236112c63/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.201939 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b809273-7fb1-4ca6-a2f6-a65dae67678f" 
path="/var/lib/kubelet/pods/2b809273-7fb1-4ca6-a2f6-a65dae67678f/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.202867 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e3760b5-3825-432e-8c36-31ded716eb2f" path="/var/lib/kubelet/pods/6e3760b5-3825-432e-8c36-31ded716eb2f/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.203337 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9798ca55-f7f5-4c09-bffc-971c1efe8971" path="/var/lib/kubelet/pods/9798ca55-f7f5-4c09-bffc-971c1efe8971/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.203802 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a681c4be-8c84-473e-bbc9-b4eeacde7fa4" path="/var/lib/kubelet/pods/a681c4be-8c84-473e-bbc9-b4eeacde7fa4/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.204760 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca98e7db-a3cd-4839-991d-d8a08b956675" path="/var/lib/kubelet/pods/ca98e7db-a3cd-4839-991d-d8a08b956675/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.205238 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d04cb746-35ad-483f-854e-3c443906f580" path="/var/lib/kubelet/pods/d04cb746-35ad-483f-854e-3c443906f580/volumes" Feb 02 17:23:05 crc kubenswrapper[4835]: I0202 17:23:05.205726 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dedb5045-87f7-4433-9d73-06e16998ae40" path="/var/lib/kubelet/pods/dedb5045-87f7-4433-9d73-06e16998ae40/volumes" Feb 02 17:23:06 crc kubenswrapper[4835]: I0202 17:23:06.289513 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:23:06 crc kubenswrapper[4835]: I0202 17:23:06.289586 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:23:06 crc kubenswrapper[4835]: I0202 17:23:06.345348 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:23:06 crc kubenswrapper[4835]: I0202 17:23:06.683126 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:23:06 crc kubenswrapper[4835]: I0202 17:23:06.736813 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rm94s"] Feb 02 17:23:08 crc kubenswrapper[4835]: I0202 17:23:08.651706 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rm94s" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerName="registry-server" containerID="cri-o://1e402c58268688a7909816154ababeebc4876e595899791c26430936f5788bce" gracePeriod=2 Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.210574 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6"] Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.211835 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.213693 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.214095 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.214293 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.215185 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.215397 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.224721 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6"] Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.328938 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.329017 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.329060 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctn89\" (UniqueName: \"kubernetes.io/projected/fbf199f3-f350-4171-ad1a-0eb83e623e22-kube-api-access-ctn89\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.329148 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.329175 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.430385 4835 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.430701 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.430736 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.430787 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.430828 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctn89\" (UniqueName: \"kubernetes.io/projected/fbf199f3-f350-4171-ad1a-0eb83e623e22-kube-api-access-ctn89\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.440923 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.441430 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.446840 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.465221 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.469800 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctn89\" (UniqueName: \"kubernetes.io/projected/fbf199f3-f350-4171-ad1a-0eb83e623e22-kube-api-access-ctn89\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.531517 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.670176 4835 generic.go:334] "Generic (PLEG): container finished" podID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerID="1e402c58268688a7909816154ababeebc4876e595899791c26430936f5788bce" exitCode=0 Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.670250 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm94s" event={"ID":"d9d9c767-3ef6-48f2-b9d3-925c673f2df9","Type":"ContainerDied","Data":"1e402c58268688a7909816154ababeebc4876e595899791c26430936f5788bce"} Feb 02 17:23:10 crc kubenswrapper[4835]: I0202 17:23:10.996102 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.054388 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6"] Feb 02 17:23:11 crc kubenswrapper[4835]: W0202 17:23:11.054825 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbf199f3_f350_4171_ad1a_0eb83e623e22.slice/crio-f76d4d3f0b41726b4d5c3e18502ae2c5cfa74834356ce3db6021c6c7caddae0c WatchSource:0}: Error finding container f76d4d3f0b41726b4d5c3e18502ae2c5cfa74834356ce3db6021c6c7caddae0c: Status 404 returned error can't find the container with id f76d4d3f0b41726b4d5c3e18502ae2c5cfa74834356ce3db6021c6c7caddae0c Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.145895 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bdlx\" (UniqueName: \"kubernetes.io/projected/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-kube-api-access-9bdlx\") pod \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.146025 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-catalog-content\") pod \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.146171 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-utilities\") pod \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\" (UID: \"d9d9c767-3ef6-48f2-b9d3-925c673f2df9\") " Feb 02 17:23:11 
crc kubenswrapper[4835]: I0202 17:23:11.147018 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-utilities" (OuterVolumeSpecName: "utilities") pod "d9d9c767-3ef6-48f2-b9d3-925c673f2df9" (UID: "d9d9c767-3ef6-48f2-b9d3-925c673f2df9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.151311 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-kube-api-access-9bdlx" (OuterVolumeSpecName: "kube-api-access-9bdlx") pod "d9d9c767-3ef6-48f2-b9d3-925c673f2df9" (UID: "d9d9c767-3ef6-48f2-b9d3-925c673f2df9"). InnerVolumeSpecName "kube-api-access-9bdlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.248245 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.248292 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bdlx\" (UniqueName: \"kubernetes.io/projected/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-kube-api-access-9bdlx\") on node \"crc\" DevicePath \"\"" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.261010 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9d9c767-3ef6-48f2-b9d3-925c673f2df9" (UID: "d9d9c767-3ef6-48f2-b9d3-925c673f2df9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.350359 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d9c767-3ef6-48f2-b9d3-925c673f2df9-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.680826 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" event={"ID":"fbf199f3-f350-4171-ad1a-0eb83e623e22","Type":"ContainerStarted","Data":"e8b003b3479e7524f44b4d10e5fc09a415b7e2fd2e9555434e638f4defd14758"} Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.680939 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" event={"ID":"fbf199f3-f350-4171-ad1a-0eb83e623e22","Type":"ContainerStarted","Data":"f76d4d3f0b41726b4d5c3e18502ae2c5cfa74834356ce3db6021c6c7caddae0c"} Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.685944 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm94s" event={"ID":"d9d9c767-3ef6-48f2-b9d3-925c673f2df9","Type":"ContainerDied","Data":"0e414e232605209ead25f3e69dec1a8bda856c69715b6fc03b7327f68a83ad04"} Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.685989 4835 scope.go:117] "RemoveContainer" containerID="1e402c58268688a7909816154ababeebc4876e595899791c26430936f5788bce" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.686071 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rm94s" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.705959 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" podStartSLOduration=1.290122308 podStartE2EDuration="1.705928252s" podCreationTimestamp="2026-02-02 17:23:10 +0000 UTC" firstStartedPulling="2026-02-02 17:23:11.057341165 +0000 UTC m=+1982.678945245" lastFinishedPulling="2026-02-02 17:23:11.473147109 +0000 UTC m=+1983.094751189" observedRunningTime="2026-02-02 17:23:11.701821175 +0000 UTC m=+1983.323425285" watchObservedRunningTime="2026-02-02 17:23:11.705928252 +0000 UTC m=+1983.327532382" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.722533 4835 scope.go:117] "RemoveContainer" containerID="d3ed139aeb68d989a03371b67a4e10d31f2834bd1abfc16bbdca994df1d105d1" Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.735393 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rm94s"] Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.743360 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rm94s"] Feb 02 17:23:11 crc kubenswrapper[4835]: I0202 17:23:11.746265 4835 scope.go:117] "RemoveContainer" containerID="86525c43aec9b9863d877e48ef1f99dfff0e585f03b07143af5d2bdaf00c742d" Feb 02 17:23:12 crc kubenswrapper[4835]: I0202 17:23:12.825928 4835 scope.go:117] "RemoveContainer" containerID="4be4026455e714dc3e0fb11423e89471f8352b6a646a13dc82724d38ff003465" Feb 02 17:23:12 crc kubenswrapper[4835]: I0202 17:23:12.855349 4835 scope.go:117] "RemoveContainer" containerID="c50efc1128a8d979799dffcd562cb64b1af0d2133b130187d442aad0e8ef5688" Feb 02 17:23:12 crc kubenswrapper[4835]: I0202 17:23:12.933301 4835 scope.go:117] "RemoveContainer" containerID="bbd2052111b641a4ab901e87a92e9b75841bb197c69d06f41fd37dffb7812984" Feb 02 17:23:12 crc kubenswrapper[4835]: I0202 17:23:12.968074 4835 scope.go:117] "RemoveContainer" containerID="2d160de772ee3ce919d1b438e8e7323db56e8d31d7e5b6106242c5f0bf86587f" Feb 02 17:23:13 crc kubenswrapper[4835]: I0202 17:23:13.200843 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" path="/var/lib/kubelet/pods/d9d9c767-3ef6-48f2-b9d3-925c673f2df9/volumes" Feb 02 17:23:15 crc kubenswrapper[4835]: I0202 17:23:15.188991 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:23:15 crc kubenswrapper[4835]: I0202 17:23:15.726820 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"7b9b67ec0a70d873df2f1e945ad068f814ea600a71d949977865039aa50f1fc5"} Feb 02 17:23:23 crc kubenswrapper[4835]: I0202 17:23:23.813996 4835 generic.go:334] "Generic (PLEG): container finished" podID="fbf199f3-f350-4171-ad1a-0eb83e623e22" containerID="e8b003b3479e7524f44b4d10e5fc09a415b7e2fd2e9555434e638f4defd14758" exitCode=0 Feb 02 17:23:23 crc kubenswrapper[4835]: I0202 17:23:23.814129 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" event={"ID":"fbf199f3-f350-4171-ad1a-0eb83e623e22","Type":"ContainerDied","Data":"e8b003b3479e7524f44b4d10e5fc09a415b7e2fd2e9555434e638f4defd14758"} Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 
17:23:25.221123 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.316475 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ceph\") pod \"fbf199f3-f350-4171-ad1a-0eb83e623e22\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.316539 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-inventory\") pod \"fbf199f3-f350-4171-ad1a-0eb83e623e22\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.316576 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-repo-setup-combined-ca-bundle\") pod \"fbf199f3-f350-4171-ad1a-0eb83e623e22\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.316619 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctn89\" (UniqueName: \"kubernetes.io/projected/fbf199f3-f350-4171-ad1a-0eb83e623e22-kube-api-access-ctn89\") pod \"fbf199f3-f350-4171-ad1a-0eb83e623e22\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.317337 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ssh-key-openstack-edpm-ipam\") pod \"fbf199f3-f350-4171-ad1a-0eb83e623e22\" (UID: \"fbf199f3-f350-4171-ad1a-0eb83e623e22\") " Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.321221 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbf199f3-f350-4171-ad1a-0eb83e623e22-kube-api-access-ctn89" (OuterVolumeSpecName: "kube-api-access-ctn89") pod "fbf199f3-f350-4171-ad1a-0eb83e623e22" (UID: "fbf199f3-f350-4171-ad1a-0eb83e623e22"). InnerVolumeSpecName "kube-api-access-ctn89". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.321903 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ceph" (OuterVolumeSpecName: "ceph") pod "fbf199f3-f350-4171-ad1a-0eb83e623e22" (UID: "fbf199f3-f350-4171-ad1a-0eb83e623e22"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.322465 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "fbf199f3-f350-4171-ad1a-0eb83e623e22" (UID: "fbf199f3-f350-4171-ad1a-0eb83e623e22"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.340090 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "fbf199f3-f350-4171-ad1a-0eb83e623e22" (UID: "fbf199f3-f350-4171-ad1a-0eb83e623e22"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.345714 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-inventory" (OuterVolumeSpecName: "inventory") pod "fbf199f3-f350-4171-ad1a-0eb83e623e22" (UID: "fbf199f3-f350-4171-ad1a-0eb83e623e22"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.419245 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctn89\" (UniqueName: \"kubernetes.io/projected/fbf199f3-f350-4171-ad1a-0eb83e623e22-kube-api-access-ctn89\") on node \"crc\" DevicePath \"\"" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.419311 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.419321 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.419331 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.419342 4835 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf199f3-f350-4171-ad1a-0eb83e623e22-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.839865 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" event={"ID":"fbf199f3-f350-4171-ad1a-0eb83e623e22","Type":"ContainerDied","Data":"f76d4d3f0b41726b4d5c3e18502ae2c5cfa74834356ce3db6021c6c7caddae0c"} Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.840316 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f76d4d3f0b41726b4d5c3e18502ae2c5cfa74834356ce3db6021c6c7caddae0c" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.840135 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.943454 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw"] Feb 02 17:23:25 crc kubenswrapper[4835]: E0202 17:23:25.943863 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerName="registry-server" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.943885 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerName="registry-server" Feb 02 17:23:25 crc kubenswrapper[4835]: E0202 17:23:25.943904 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerName="extract-utilities" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.943911 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerName="extract-utilities" Feb 02 17:23:25 crc kubenswrapper[4835]: E0202 17:23:25.943936 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerName="extract-content" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.943944 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerName="extract-content" Feb 02 17:23:25 crc kubenswrapper[4835]: E0202 17:23:25.943959 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf199f3-f350-4171-ad1a-0eb83e623e22" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.943968 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf199f3-f350-4171-ad1a-0eb83e623e22" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.944173 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9d9c767-3ef6-48f2-b9d3-925c673f2df9" containerName="registry-server" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.944198 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbf199f3-f350-4171-ad1a-0eb83e623e22" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.944916 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.946924 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.948023 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.948581 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.949141 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.952526 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:23:25 crc kubenswrapper[4835]: I0202 17:23:25.961243 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw"] Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.029592 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.029653 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.029696 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.029923 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nqzr\" (UniqueName: \"kubernetes.io/projected/29d9c1e8-035d-485c-bbfa-2c0328468c6a-kube-api-access-5nqzr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.030116 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.131726 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.131835 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.131913 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.132118 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nqzr\" (UniqueName: \"kubernetes.io/projected/29d9c1e8-035d-485c-bbfa-2c0328468c6a-kube-api-access-5nqzr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.132223 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.137138 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.137196 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.139426 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.151905 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ceph\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.152401 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nqzr\" (UniqueName: \"kubernetes.io/projected/29d9c1e8-035d-485c-bbfa-2c0328468c6a-kube-api-access-5nqzr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.263848 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:23:26 crc kubenswrapper[4835]: I0202 17:23:26.839891 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw"] Feb 02 17:23:27 crc kubenswrapper[4835]: I0202 17:23:27.862142 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" event={"ID":"29d9c1e8-035d-485c-bbfa-2c0328468c6a","Type":"ContainerStarted","Data":"f58da85d00e261886139286c3cb4e323b1243c7601fd81f3ae106e5d8b44b40d"} Feb 02 17:23:27 crc kubenswrapper[4835]: I0202 17:23:27.862835 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" event={"ID":"29d9c1e8-035d-485c-bbfa-2c0328468c6a","Type":"ContainerStarted","Data":"8d1f2a28edde0a2fa8b68f9542e60545d0878d426340cb3d9d801487c1d271c3"} Feb 02 17:23:27 crc kubenswrapper[4835]: I0202 17:23:27.887697 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" podStartSLOduration=2.476449477 podStartE2EDuration="2.887678652s" podCreationTimestamp="2026-02-02 17:23:25 +0000 UTC" firstStartedPulling="2026-02-02 17:23:26.854503774 +0000 UTC m=+1998.476107854" lastFinishedPulling="2026-02-02 17:23:27.265732949 +0000 UTC m=+1998.887337029" observedRunningTime="2026-02-02 17:23:27.88339834 +0000 UTC m=+1999.505002420" watchObservedRunningTime="2026-02-02 17:23:27.887678652 +0000 UTC m=+1999.509282732" Feb 02 17:24:13 crc kubenswrapper[4835]: I0202 17:24:13.126527 4835 scope.go:117] "RemoveContainer" containerID="39e0394980bc7b832551b411c6360de66fa1b01dd16c79a847db902be7ec33db" Feb 02 17:24:13 crc kubenswrapper[4835]: I0202 17:24:13.172058 4835 scope.go:117] "RemoveContainer" containerID="45c1c9ca8209abd4d932a5c5974e0e50a2c9f34ff3c84b4cb9d4920bb0c9c37b" Feb 02 17:24:13 crc kubenswrapper[4835]: I0202 17:24:13.242235 4835 scope.go:117] "RemoveContainer" containerID="d862a07fd27a783e73b6eae2d6546d7839e2a8535c9dc06c6d14b0e568e71af8" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.304484 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4brtp"] Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.318954 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4brtp"] Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.319057 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.390584 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-utilities\") pod \"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.390674 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrhbm\" (UniqueName: \"kubernetes.io/projected/3e40bc6b-0758-4214-a772-c30c0750c784-kube-api-access-jrhbm\") pod \"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.390702 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-catalog-content\") pod \"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.492649 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrhbm\" (UniqueName: \"kubernetes.io/projected/3e40bc6b-0758-4214-a772-c30c0750c784-kube-api-access-jrhbm\") pod \"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.492696 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-catalog-content\") pod \"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.493027 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-utilities\") pod \"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.493475 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-catalog-content\") pod \"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.493561 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-utilities\") pod \"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.514822 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrhbm\" (UniqueName: \"kubernetes.io/projected/3e40bc6b-0758-4214-a772-c30c0750c784-kube-api-access-jrhbm\") pod 
\"community-operators-4brtp\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:34 crc kubenswrapper[4835]: I0202 17:24:34.656846 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.176154 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4brtp"] Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.281116 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2swsf"] Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.282961 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.300709 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2swsf"] Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.412873 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-utilities\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.412940 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6c8j\" (UniqueName: \"kubernetes.io/projected/795e5967-2d85-47bc-be57-ba3812a137af-kube-api-access-q6c8j\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.413025 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-catalog-content\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.484699 4835 generic.go:334] "Generic (PLEG): container finished" podID="3e40bc6b-0758-4214-a772-c30c0750c784" containerID="071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df" exitCode=0 Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.484803 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4brtp" event={"ID":"3e40bc6b-0758-4214-a772-c30c0750c784","Type":"ContainerDied","Data":"071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df"} Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.485005 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4brtp" event={"ID":"3e40bc6b-0758-4214-a772-c30c0750c784","Type":"ContainerStarted","Data":"f22c66d61fe958baddfc38af0afea05a74cee1388400fe3896f99f465f02567c"} Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.514785 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-utilities\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " 
pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.514860 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6c8j\" (UniqueName: \"kubernetes.io/projected/795e5967-2d85-47bc-be57-ba3812a137af-kube-api-access-q6c8j\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.514920 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-catalog-content\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.515396 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-utilities\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.515432 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-catalog-content\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.539080 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6c8j\" (UniqueName: \"kubernetes.io/projected/795e5967-2d85-47bc-be57-ba3812a137af-kube-api-access-q6c8j\") pod \"certified-operators-2swsf\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:35 crc kubenswrapper[4835]: I0202 17:24:35.675499 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:36 crc kubenswrapper[4835]: I0202 17:24:36.009080 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2swsf"] Feb 02 17:24:36 crc kubenswrapper[4835]: I0202 17:24:36.492607 4835 generic.go:334] "Generic (PLEG): container finished" podID="795e5967-2d85-47bc-be57-ba3812a137af" containerID="a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5" exitCode=0 Feb 02 17:24:36 crc kubenswrapper[4835]: I0202 17:24:36.492812 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2swsf" event={"ID":"795e5967-2d85-47bc-be57-ba3812a137af","Type":"ContainerDied","Data":"a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5"} Feb 02 17:24:36 crc kubenswrapper[4835]: I0202 17:24:36.492888 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2swsf" event={"ID":"795e5967-2d85-47bc-be57-ba3812a137af","Type":"ContainerStarted","Data":"b2f6733fc9f6d7d7c6c39e21921ec355c77083fd40a43ff505b2ca1e2b16728f"} Feb 02 17:24:36 crc kubenswrapper[4835]: I0202 17:24:36.495235 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4brtp" event={"ID":"3e40bc6b-0758-4214-a772-c30c0750c784","Type":"ContainerStarted","Data":"ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397"} Feb 02 17:24:37 crc kubenswrapper[4835]: I0202 17:24:37.508360 4835 generic.go:334] "Generic (PLEG): container finished" podID="3e40bc6b-0758-4214-a772-c30c0750c784" containerID="ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397" exitCode=0 Feb 02 17:24:37 crc kubenswrapper[4835]: I0202 17:24:37.508587 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4brtp" event={"ID":"3e40bc6b-0758-4214-a772-c30c0750c784","Type":"ContainerDied","Data":"ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397"} Feb 02 17:24:37 crc kubenswrapper[4835]: I0202 17:24:37.511021 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2swsf" event={"ID":"795e5967-2d85-47bc-be57-ba3812a137af","Type":"ContainerStarted","Data":"19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b"} Feb 02 17:24:38 crc kubenswrapper[4835]: I0202 17:24:38.521819 4835 generic.go:334] "Generic (PLEG): container finished" podID="795e5967-2d85-47bc-be57-ba3812a137af" containerID="19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b" exitCode=0 Feb 02 17:24:38 crc kubenswrapper[4835]: I0202 17:24:38.521916 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2swsf" event={"ID":"795e5967-2d85-47bc-be57-ba3812a137af","Type":"ContainerDied","Data":"19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b"} Feb 02 17:24:38 crc kubenswrapper[4835]: I0202 17:24:38.525759 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4brtp" event={"ID":"3e40bc6b-0758-4214-a772-c30c0750c784","Type":"ContainerStarted","Data":"7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992"} Feb 02 17:24:38 crc kubenswrapper[4835]: I0202 17:24:38.561700 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4brtp" podStartSLOduration=1.84142362 podStartE2EDuration="4.561682564s" 
podCreationTimestamp="2026-02-02 17:24:34 +0000 UTC" firstStartedPulling="2026-02-02 17:24:35.486394339 +0000 UTC m=+2067.107998419" lastFinishedPulling="2026-02-02 17:24:38.206653273 +0000 UTC m=+2069.828257363" observedRunningTime="2026-02-02 17:24:38.560074108 +0000 UTC m=+2070.181678198" watchObservedRunningTime="2026-02-02 17:24:38.561682564 +0000 UTC m=+2070.183286654" Feb 02 17:24:39 crc kubenswrapper[4835]: I0202 17:24:39.536623 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2swsf" event={"ID":"795e5967-2d85-47bc-be57-ba3812a137af","Type":"ContainerStarted","Data":"98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8"} Feb 02 17:24:39 crc kubenswrapper[4835]: I0202 17:24:39.564130 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2swsf" podStartSLOduration=2.093886988 podStartE2EDuration="4.56410845s" podCreationTimestamp="2026-02-02 17:24:35 +0000 UTC" firstStartedPulling="2026-02-02 17:24:36.494450134 +0000 UTC m=+2068.116054214" lastFinishedPulling="2026-02-02 17:24:38.964671566 +0000 UTC m=+2070.586275676" observedRunningTime="2026-02-02 17:24:39.554157977 +0000 UTC m=+2071.175762067" watchObservedRunningTime="2026-02-02 17:24:39.56410845 +0000 UTC m=+2071.185712540" Feb 02 17:24:44 crc kubenswrapper[4835]: I0202 17:24:44.657199 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:44 crc kubenswrapper[4835]: I0202 17:24:44.657786 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:44 crc kubenswrapper[4835]: I0202 17:24:44.702014 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:45 crc kubenswrapper[4835]: I0202 17:24:45.630446 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:45 crc kubenswrapper[4835]: I0202 17:24:45.699175 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:45 crc kubenswrapper[4835]: I0202 17:24:45.699235 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:45 crc kubenswrapper[4835]: I0202 17:24:45.717178 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4brtp"] Feb 02 17:24:45 crc kubenswrapper[4835]: I0202 17:24:45.742480 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:46 crc kubenswrapper[4835]: I0202 17:24:46.647101 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:47 crc kubenswrapper[4835]: I0202 17:24:47.468988 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2swsf"] Feb 02 17:24:47 crc kubenswrapper[4835]: I0202 17:24:47.596749 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4brtp" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" containerName="registry-server" 
containerID="cri-o://7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992" gracePeriod=2 Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.097592 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.284217 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrhbm\" (UniqueName: \"kubernetes.io/projected/3e40bc6b-0758-4214-a772-c30c0750c784-kube-api-access-jrhbm\") pod \"3e40bc6b-0758-4214-a772-c30c0750c784\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.284375 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-utilities\") pod \"3e40bc6b-0758-4214-a772-c30c0750c784\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.284683 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-catalog-content\") pod \"3e40bc6b-0758-4214-a772-c30c0750c784\" (UID: \"3e40bc6b-0758-4214-a772-c30c0750c784\") " Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.285161 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-utilities" (OuterVolumeSpecName: "utilities") pod "3e40bc6b-0758-4214-a772-c30c0750c784" (UID: "3e40bc6b-0758-4214-a772-c30c0750c784"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.285508 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.290811 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e40bc6b-0758-4214-a772-c30c0750c784-kube-api-access-jrhbm" (OuterVolumeSpecName: "kube-api-access-jrhbm") pod "3e40bc6b-0758-4214-a772-c30c0750c784" (UID: "3e40bc6b-0758-4214-a772-c30c0750c784"). InnerVolumeSpecName "kube-api-access-jrhbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.345424 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e40bc6b-0758-4214-a772-c30c0750c784" (UID: "3e40bc6b-0758-4214-a772-c30c0750c784"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.387371 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrhbm\" (UniqueName: \"kubernetes.io/projected/3e40bc6b-0758-4214-a772-c30c0750c784-kube-api-access-jrhbm\") on node \"crc\" DevicePath \"\"" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.387405 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e40bc6b-0758-4214-a772-c30c0750c784-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.606468 4835 generic.go:334] "Generic (PLEG): container finished" podID="3e40bc6b-0758-4214-a772-c30c0750c784" containerID="7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992" exitCode=0 Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.606519 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4brtp" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.606534 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4brtp" event={"ID":"3e40bc6b-0758-4214-a772-c30c0750c784","Type":"ContainerDied","Data":"7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992"} Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.606720 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4brtp" event={"ID":"3e40bc6b-0758-4214-a772-c30c0750c784","Type":"ContainerDied","Data":"f22c66d61fe958baddfc38af0afea05a74cee1388400fe3896f99f465f02567c"} Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.606741 4835 scope.go:117] "RemoveContainer" containerID="7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.607113 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2swsf" podUID="795e5967-2d85-47bc-be57-ba3812a137af" containerName="registry-server" containerID="cri-o://98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8" gracePeriod=2 Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.630514 4835 scope.go:117] "RemoveContainer" containerID="ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.653052 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4brtp"] Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.658988 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4brtp"] Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.662135 4835 scope.go:117] "RemoveContainer" containerID="071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.792763 4835 scope.go:117] "RemoveContainer" containerID="7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992" Feb 02 17:24:48 crc kubenswrapper[4835]: E0202 17:24:48.793897 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992\": container with ID starting with 7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992 not found: ID does not exist" 
containerID="7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.793946 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992"} err="failed to get container status \"7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992\": rpc error: code = NotFound desc = could not find container \"7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992\": container with ID starting with 7b2abe9bc23673b74f73aeb2df2089203b6f98508cc86fe317226686cebfd992 not found: ID does not exist" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.793975 4835 scope.go:117] "RemoveContainer" containerID="ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397" Feb 02 17:24:48 crc kubenswrapper[4835]: E0202 17:24:48.794388 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397\": container with ID starting with ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397 not found: ID does not exist" containerID="ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.794441 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397"} err="failed to get container status \"ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397\": rpc error: code = NotFound desc = could not find container \"ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397\": container with ID starting with ee67f3e19191aef1f9f802ef3af631bf0c71b465c7b518db56a405a5dc18d397 not found: ID does not exist" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.794467 4835 scope.go:117] "RemoveContainer" containerID="071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df" Feb 02 17:24:48 crc kubenswrapper[4835]: E0202 17:24:48.794791 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df\": container with ID starting with 071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df not found: ID does not exist" containerID="071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df" Feb 02 17:24:48 crc kubenswrapper[4835]: I0202 17:24:48.794826 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df"} err="failed to get container status \"071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df\": rpc error: code = NotFound desc = could not find container \"071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df\": container with ID starting with 071154cc55941fbd03a4ee9f8e8a64631aaa81bba81e1a1aa318d67d224cf1df not found: ID does not exist" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.132961 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.200190 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" path="/var/lib/kubelet/pods/3e40bc6b-0758-4214-a772-c30c0750c784/volumes" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.301559 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-catalog-content\") pod \"795e5967-2d85-47bc-be57-ba3812a137af\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.301785 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6c8j\" (UniqueName: \"kubernetes.io/projected/795e5967-2d85-47bc-be57-ba3812a137af-kube-api-access-q6c8j\") pod \"795e5967-2d85-47bc-be57-ba3812a137af\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.301897 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-utilities\") pod \"795e5967-2d85-47bc-be57-ba3812a137af\" (UID: \"795e5967-2d85-47bc-be57-ba3812a137af\") " Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.302536 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-utilities" (OuterVolumeSpecName: "utilities") pod "795e5967-2d85-47bc-be57-ba3812a137af" (UID: "795e5967-2d85-47bc-be57-ba3812a137af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.308546 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/795e5967-2d85-47bc-be57-ba3812a137af-kube-api-access-q6c8j" (OuterVolumeSpecName: "kube-api-access-q6c8j") pod "795e5967-2d85-47bc-be57-ba3812a137af" (UID: "795e5967-2d85-47bc-be57-ba3812a137af"). InnerVolumeSpecName "kube-api-access-q6c8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.350610 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "795e5967-2d85-47bc-be57-ba3812a137af" (UID: "795e5967-2d85-47bc-be57-ba3812a137af"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.404053 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.404101 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/795e5967-2d85-47bc-be57-ba3812a137af-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.404118 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6c8j\" (UniqueName: \"kubernetes.io/projected/795e5967-2d85-47bc-be57-ba3812a137af-kube-api-access-q6c8j\") on node \"crc\" DevicePath \"\"" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.617956 4835 generic.go:334] "Generic (PLEG): container finished" podID="795e5967-2d85-47bc-be57-ba3812a137af" containerID="98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8" exitCode=0 Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.617996 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2swsf" event={"ID":"795e5967-2d85-47bc-be57-ba3812a137af","Type":"ContainerDied","Data":"98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8"} Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.618021 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2swsf" event={"ID":"795e5967-2d85-47bc-be57-ba3812a137af","Type":"ContainerDied","Data":"b2f6733fc9f6d7d7c6c39e21921ec355c77083fd40a43ff505b2ca1e2b16728f"} Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.618042 4835 scope.go:117] "RemoveContainer" containerID="98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.618043 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2swsf" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.637132 4835 scope.go:117] "RemoveContainer" containerID="19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.657486 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2swsf"] Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.664662 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2swsf"] Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.685959 4835 scope.go:117] "RemoveContainer" containerID="a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.707436 4835 scope.go:117] "RemoveContainer" containerID="98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8" Feb 02 17:24:49 crc kubenswrapper[4835]: E0202 17:24:49.707964 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8\": container with ID starting with 98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8 not found: ID does not exist" containerID="98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.708021 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8"} err="failed to get container status \"98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8\": rpc error: code = NotFound desc = could not find container \"98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8\": container with ID starting with 98eb1bd563c4657f794ea63f3ea76b7d70a1112183c725d76bb3f267bae58ea8 not found: ID does not exist" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.708057 4835 scope.go:117] "RemoveContainer" containerID="19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b" Feb 02 17:24:49 crc kubenswrapper[4835]: E0202 17:24:49.708432 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b\": container with ID starting with 19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b not found: ID does not exist" containerID="19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.708465 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b"} err="failed to get container status \"19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b\": rpc error: code = NotFound desc = could not find container \"19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b\": container with ID starting with 19fda0021f0962c1cbb24f0dd45e1d1ed55ee12b62f8421a0410cbff20ce6e3b not found: ID does not exist" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.708523 4835 scope.go:117] "RemoveContainer" containerID="a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5" Feb 02 17:24:49 crc kubenswrapper[4835]: E0202 17:24:49.708872 4835 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5\": container with ID starting with a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5 not found: ID does not exist" containerID="a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5" Feb 02 17:24:49 crc kubenswrapper[4835]: I0202 17:24:49.708896 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5"} err="failed to get container status \"a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5\": rpc error: code = NotFound desc = could not find container \"a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5\": container with ID starting with a630dc7cab38a66d826a39619f0c1b9f866df2ecb4f9c6707195f9d4f81f25c5 not found: ID does not exist" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.205241 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="795e5967-2d85-47bc-be57-ba3812a137af" path="/var/lib/kubelet/pods/795e5967-2d85-47bc-be57-ba3812a137af/volumes" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.497202 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n757k"] Feb 02 17:24:51 crc kubenswrapper[4835]: E0202 17:24:51.497911 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" containerName="registry-server" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.497940 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" containerName="registry-server" Feb 02 17:24:51 crc kubenswrapper[4835]: E0202 17:24:51.497974 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="795e5967-2d85-47bc-be57-ba3812a137af" containerName="extract-utilities" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.497991 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="795e5967-2d85-47bc-be57-ba3812a137af" containerName="extract-utilities" Feb 02 17:24:51 crc kubenswrapper[4835]: E0202 17:24:51.498023 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" containerName="extract-utilities" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.498040 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" containerName="extract-utilities" Feb 02 17:24:51 crc kubenswrapper[4835]: E0202 17:24:51.498076 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" containerName="extract-content" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.498091 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" containerName="extract-content" Feb 02 17:24:51 crc kubenswrapper[4835]: E0202 17:24:51.498110 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="795e5967-2d85-47bc-be57-ba3812a137af" containerName="extract-content" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.498126 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="795e5967-2d85-47bc-be57-ba3812a137af" containerName="extract-content" Feb 02 17:24:51 crc kubenswrapper[4835]: E0202 17:24:51.498193 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="795e5967-2d85-47bc-be57-ba3812a137af" 
containerName="registry-server" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.498212 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="795e5967-2d85-47bc-be57-ba3812a137af" containerName="registry-server" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.498694 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="795e5967-2d85-47bc-be57-ba3812a137af" containerName="registry-server" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.498741 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e40bc6b-0758-4214-a772-c30c0750c784" containerName="registry-server" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.501457 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.521277 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n757k"] Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.565120 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-utilities\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.565653 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wqlz\" (UniqueName: \"kubernetes.io/projected/c7155880-4527-4cda-86c4-655abbf7a4f9-kube-api-access-2wqlz\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.565704 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-catalog-content\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.667383 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-utilities\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.667537 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wqlz\" (UniqueName: \"kubernetes.io/projected/c7155880-4527-4cda-86c4-655abbf7a4f9-kube-api-access-2wqlz\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.667575 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-catalog-content\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.667949 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-utilities\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.668002 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-catalog-content\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.686825 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wqlz\" (UniqueName: \"kubernetes.io/projected/c7155880-4527-4cda-86c4-655abbf7a4f9-kube-api-access-2wqlz\") pod \"redhat-marketplace-n757k\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:51 crc kubenswrapper[4835]: I0202 17:24:51.878942 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:24:52 crc kubenswrapper[4835]: I0202 17:24:52.362131 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n757k"] Feb 02 17:24:52 crc kubenswrapper[4835]: W0202 17:24:52.364761 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc7155880_4527_4cda_86c4_655abbf7a4f9.slice/crio-9cf0b8c3383b18c11f16814bf5d08300517ea7807b1cbc032d3f49e0f35abbfa WatchSource:0}: Error finding container 9cf0b8c3383b18c11f16814bf5d08300517ea7807b1cbc032d3f49e0f35abbfa: Status 404 returned error can't find the container with id 9cf0b8c3383b18c11f16814bf5d08300517ea7807b1cbc032d3f49e0f35abbfa Feb 02 17:24:52 crc kubenswrapper[4835]: I0202 17:24:52.649621 4835 generic.go:334] "Generic (PLEG): container finished" podID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerID="21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443" exitCode=0 Feb 02 17:24:52 crc kubenswrapper[4835]: I0202 17:24:52.649668 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n757k" event={"ID":"c7155880-4527-4cda-86c4-655abbf7a4f9","Type":"ContainerDied","Data":"21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443"} Feb 02 17:24:52 crc kubenswrapper[4835]: I0202 17:24:52.649712 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n757k" event={"ID":"c7155880-4527-4cda-86c4-655abbf7a4f9","Type":"ContainerStarted","Data":"9cf0b8c3383b18c11f16814bf5d08300517ea7807b1cbc032d3f49e0f35abbfa"} Feb 02 17:24:54 crc kubenswrapper[4835]: I0202 17:24:54.670315 4835 generic.go:334] "Generic (PLEG): container finished" podID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerID="3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac" exitCode=0 Feb 02 17:24:54 crc kubenswrapper[4835]: I0202 17:24:54.670403 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n757k" event={"ID":"c7155880-4527-4cda-86c4-655abbf7a4f9","Type":"ContainerDied","Data":"3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac"} Feb 02 17:24:55 crc kubenswrapper[4835]: I0202 17:24:55.680409 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-n757k" event={"ID":"c7155880-4527-4cda-86c4-655abbf7a4f9","Type":"ContainerStarted","Data":"8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd"} Feb 02 17:24:55 crc kubenswrapper[4835]: I0202 17:24:55.706562 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n757k" podStartSLOduration=2.051004156 podStartE2EDuration="4.706535794s" podCreationTimestamp="2026-02-02 17:24:51 +0000 UTC" firstStartedPulling="2026-02-02 17:24:52.651157623 +0000 UTC m=+2084.272761703" lastFinishedPulling="2026-02-02 17:24:55.306689261 +0000 UTC m=+2086.928293341" observedRunningTime="2026-02-02 17:24:55.700856002 +0000 UTC m=+2087.322460122" watchObservedRunningTime="2026-02-02 17:24:55.706535794 +0000 UTC m=+2087.328139914" Feb 02 17:25:01 crc kubenswrapper[4835]: I0202 17:25:01.879574 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:25:01 crc kubenswrapper[4835]: I0202 17:25:01.880189 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:25:01 crc kubenswrapper[4835]: I0202 17:25:01.928066 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:25:02 crc kubenswrapper[4835]: I0202 17:25:02.770732 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:25:02 crc kubenswrapper[4835]: I0202 17:25:02.818359 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n757k"] Feb 02 17:25:04 crc kubenswrapper[4835]: I0202 17:25:04.747387 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n757k" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerName="registry-server" containerID="cri-o://8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd" gracePeriod=2 Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.191646 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.235745 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-catalog-content\") pod \"c7155880-4527-4cda-86c4-655abbf7a4f9\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.235859 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wqlz\" (UniqueName: \"kubernetes.io/projected/c7155880-4527-4cda-86c4-655abbf7a4f9-kube-api-access-2wqlz\") pod \"c7155880-4527-4cda-86c4-655abbf7a4f9\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.242367 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7155880-4527-4cda-86c4-655abbf7a4f9-kube-api-access-2wqlz" (OuterVolumeSpecName: "kube-api-access-2wqlz") pod "c7155880-4527-4cda-86c4-655abbf7a4f9" (UID: "c7155880-4527-4cda-86c4-655abbf7a4f9"). InnerVolumeSpecName "kube-api-access-2wqlz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.261151 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c7155880-4527-4cda-86c4-655abbf7a4f9" (UID: "c7155880-4527-4cda-86c4-655abbf7a4f9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.338248 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-utilities\") pod \"c7155880-4527-4cda-86c4-655abbf7a4f9\" (UID: \"c7155880-4527-4cda-86c4-655abbf7a4f9\") " Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.338820 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.338846 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wqlz\" (UniqueName: \"kubernetes.io/projected/c7155880-4527-4cda-86c4-655abbf7a4f9-kube-api-access-2wqlz\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.339163 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-utilities" (OuterVolumeSpecName: "utilities") pod "c7155880-4527-4cda-86c4-655abbf7a4f9" (UID: "c7155880-4527-4cda-86c4-655abbf7a4f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.440322 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7155880-4527-4cda-86c4-655abbf7a4f9-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.757597 4835 generic.go:334] "Generic (PLEG): container finished" podID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerID="8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd" exitCode=0 Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.757639 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n757k" event={"ID":"c7155880-4527-4cda-86c4-655abbf7a4f9","Type":"ContainerDied","Data":"8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd"} Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.757665 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n757k" event={"ID":"c7155880-4527-4cda-86c4-655abbf7a4f9","Type":"ContainerDied","Data":"9cf0b8c3383b18c11f16814bf5d08300517ea7807b1cbc032d3f49e0f35abbfa"} Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.757666 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n757k" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.757680 4835 scope.go:117] "RemoveContainer" containerID="8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.784557 4835 scope.go:117] "RemoveContainer" containerID="3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.798512 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n757k"] Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.805685 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n757k"] Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.835606 4835 scope.go:117] "RemoveContainer" containerID="21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.856066 4835 scope.go:117] "RemoveContainer" containerID="8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd" Feb 02 17:25:05 crc kubenswrapper[4835]: E0202 17:25:05.856509 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd\": container with ID starting with 8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd not found: ID does not exist" containerID="8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.856541 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd"} err="failed to get container status \"8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd\": rpc error: code = NotFound desc = could not find container \"8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd\": container with ID starting with 8089c42a70c9c2013e844f68efba0b9d681c80ad4c2432494d60cec6386882bd not found: ID does not exist" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.856564 4835 scope.go:117] "RemoveContainer" containerID="3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac" Feb 02 17:25:05 crc kubenswrapper[4835]: E0202 17:25:05.856863 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac\": container with ID starting with 3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac not found: ID does not exist" containerID="3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.856891 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac"} err="failed to get container status \"3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac\": rpc error: code = NotFound desc = could not find container \"3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac\": container with ID starting with 3e4448341ebe23aa9d493da6b3c396aff064e224c13ac0ee7f34408d36cddbac not found: ID does not exist" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.856909 4835 scope.go:117] "RemoveContainer" 
containerID="21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443" Feb 02 17:25:05 crc kubenswrapper[4835]: E0202 17:25:05.857331 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443\": container with ID starting with 21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443 not found: ID does not exist" containerID="21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443" Feb 02 17:25:05 crc kubenswrapper[4835]: I0202 17:25:05.857354 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443"} err="failed to get container status \"21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443\": rpc error: code = NotFound desc = could not find container \"21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443\": container with ID starting with 21cf11f01ccf305b52708e310a777e3079b65f066cba8d596051fa61df5e8443 not found: ID does not exist" Feb 02 17:25:07 crc kubenswrapper[4835]: I0202 17:25:07.202850 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" path="/var/lib/kubelet/pods/c7155880-4527-4cda-86c4-655abbf7a4f9/volumes" Feb 02 17:25:13 crc kubenswrapper[4835]: I0202 17:25:13.358411 4835 scope.go:117] "RemoveContainer" containerID="f5a433a469eb0c69a8526b8b4bee40ecf4d6ce95867bf3d5a3fcca4f8fb2d0ab" Feb 02 17:25:13 crc kubenswrapper[4835]: I0202 17:25:13.394781 4835 scope.go:117] "RemoveContainer" containerID="29af561a87a222d4353e2e6ca0de8bca62f286233bfce597173b45302b625f3c" Feb 02 17:25:13 crc kubenswrapper[4835]: I0202 17:25:13.439378 4835 scope.go:117] "RemoveContainer" containerID="c9ef10faeffd827c049817fc26193697ad1783dd921248d788b65807bb125be7" Feb 02 17:25:30 crc kubenswrapper[4835]: I0202 17:25:30.945906 4835 generic.go:334] "Generic (PLEG): container finished" podID="29d9c1e8-035d-485c-bbfa-2c0328468c6a" containerID="f58da85d00e261886139286c3cb4e323b1243c7601fd81f3ae106e5d8b44b40d" exitCode=0 Feb 02 17:25:30 crc kubenswrapper[4835]: I0202 17:25:30.946088 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" event={"ID":"29d9c1e8-035d-485c-bbfa-2c0328468c6a","Type":"ContainerDied","Data":"f58da85d00e261886139286c3cb4e323b1243c7601fd81f3ae106e5d8b44b40d"} Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.387347 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.521470 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-inventory\") pod \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.521563 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-bootstrap-combined-ca-bundle\") pod \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.521667 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ssh-key-openstack-edpm-ipam\") pod \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.521700 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nqzr\" (UniqueName: \"kubernetes.io/projected/29d9c1e8-035d-485c-bbfa-2c0328468c6a-kube-api-access-5nqzr\") pod \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.521751 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ceph\") pod \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\" (UID: \"29d9c1e8-035d-485c-bbfa-2c0328468c6a\") " Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.527608 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "29d9c1e8-035d-485c-bbfa-2c0328468c6a" (UID: "29d9c1e8-035d-485c-bbfa-2c0328468c6a"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.527752 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29d9c1e8-035d-485c-bbfa-2c0328468c6a-kube-api-access-5nqzr" (OuterVolumeSpecName: "kube-api-access-5nqzr") pod "29d9c1e8-035d-485c-bbfa-2c0328468c6a" (UID: "29d9c1e8-035d-485c-bbfa-2c0328468c6a"). InnerVolumeSpecName "kube-api-access-5nqzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.528365 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ceph" (OuterVolumeSpecName: "ceph") pod "29d9c1e8-035d-485c-bbfa-2c0328468c6a" (UID: "29d9c1e8-035d-485c-bbfa-2c0328468c6a"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.546955 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "29d9c1e8-035d-485c-bbfa-2c0328468c6a" (UID: "29d9c1e8-035d-485c-bbfa-2c0328468c6a"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.564456 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-inventory" (OuterVolumeSpecName: "inventory") pod "29d9c1e8-035d-485c-bbfa-2c0328468c6a" (UID: "29d9c1e8-035d-485c-bbfa-2c0328468c6a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.624245 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.624308 4835 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.624322 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.624330 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nqzr\" (UniqueName: \"kubernetes.io/projected/29d9c1e8-035d-485c-bbfa-2c0328468c6a-kube-api-access-5nqzr\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.624338 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/29d9c1e8-035d-485c-bbfa-2c0328468c6a-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.963969 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" event={"ID":"29d9c1e8-035d-485c-bbfa-2c0328468c6a","Type":"ContainerDied","Data":"8d1f2a28edde0a2fa8b68f9542e60545d0878d426340cb3d9d801487c1d271c3"} Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.964016 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d1f2a28edde0a2fa8b68f9542e60545d0878d426340cb3d9d801487c1d271c3" Feb 02 17:25:32 crc kubenswrapper[4835]: I0202 17:25:32.964030 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.049042 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq"] Feb 02 17:25:33 crc kubenswrapper[4835]: E0202 17:25:33.049448 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d9c1e8-035d-485c-bbfa-2c0328468c6a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.049471 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d9c1e8-035d-485c-bbfa-2c0328468c6a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 17:25:33 crc kubenswrapper[4835]: E0202 17:25:33.049487 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerName="registry-server" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.049494 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerName="registry-server" Feb 02 17:25:33 crc kubenswrapper[4835]: E0202 17:25:33.049508 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerName="extract-content" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.049514 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerName="extract-content" Feb 02 17:25:33 crc kubenswrapper[4835]: E0202 17:25:33.049524 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerName="extract-utilities" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.049531 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerName="extract-utilities" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.049702 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="29d9c1e8-035d-485c-bbfa-2c0328468c6a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.049721 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7155880-4527-4cda-86c4-655abbf7a4f9" containerName="registry-server" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.050576 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.055729 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.056139 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.056132 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.056293 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.056474 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.060924 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq"] Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.132549 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.132614 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.132784 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.132941 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjhsv\" (UniqueName: \"kubernetes.io/projected/87617dd5-12a8-49cc-867a-aa0f2d0db447-kube-api-access-hjhsv\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.234444 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.234540 4835 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.234596 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.234667 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjhsv\" (UniqueName: \"kubernetes.io/projected/87617dd5-12a8-49cc-867a-aa0f2d0db447-kube-api-access-hjhsv\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.238802 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.238844 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.239239 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.262514 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjhsv\" (UniqueName: \"kubernetes.io/projected/87617dd5-12a8-49cc-867a-aa0f2d0db447-kube-api-access-hjhsv\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-7shcq\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.369397 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.932792 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq"] Feb 02 17:25:33 crc kubenswrapper[4835]: I0202 17:25:33.973572 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" event={"ID":"87617dd5-12a8-49cc-867a-aa0f2d0db447","Type":"ContainerStarted","Data":"df47c0ea97b24a0e70298e4578b9929a5a0755cd55a2886dc713570b7ce991af"} Feb 02 17:25:35 crc kubenswrapper[4835]: I0202 17:25:35.997101 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" event={"ID":"87617dd5-12a8-49cc-867a-aa0f2d0db447","Type":"ContainerStarted","Data":"b48f89c5ea34e70697b20b8c7699522bb9a359a107bcb5b9ab0603905fa05c27"} Feb 02 17:25:36 crc kubenswrapper[4835]: I0202 17:25:36.017768 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" podStartSLOduration=1.7341500810000001 podStartE2EDuration="3.017751982s" podCreationTimestamp="2026-02-02 17:25:33 +0000 UTC" firstStartedPulling="2026-02-02 17:25:33.937982716 +0000 UTC m=+2125.559586796" lastFinishedPulling="2026-02-02 17:25:35.221584617 +0000 UTC m=+2126.843188697" observedRunningTime="2026-02-02 17:25:36.016228468 +0000 UTC m=+2127.637832548" watchObservedRunningTime="2026-02-02 17:25:36.017751982 +0000 UTC m=+2127.639356062" Feb 02 17:25:44 crc kubenswrapper[4835]: I0202 17:25:44.870823 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:25:44 crc kubenswrapper[4835]: I0202 17:25:44.871503 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:25:57 crc kubenswrapper[4835]: I0202 17:25:57.167383 4835 generic.go:334] "Generic (PLEG): container finished" podID="87617dd5-12a8-49cc-867a-aa0f2d0db447" containerID="b48f89c5ea34e70697b20b8c7699522bb9a359a107bcb5b9ab0603905fa05c27" exitCode=0 Feb 02 17:25:57 crc kubenswrapper[4835]: I0202 17:25:57.167961 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" event={"ID":"87617dd5-12a8-49cc-867a-aa0f2d0db447","Type":"ContainerDied","Data":"b48f89c5ea34e70697b20b8c7699522bb9a359a107bcb5b9ab0603905fa05c27"} Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.638937 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.706441 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjhsv\" (UniqueName: \"kubernetes.io/projected/87617dd5-12a8-49cc-867a-aa0f2d0db447-kube-api-access-hjhsv\") pod \"87617dd5-12a8-49cc-867a-aa0f2d0db447\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.706553 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ssh-key-openstack-edpm-ipam\") pod \"87617dd5-12a8-49cc-867a-aa0f2d0db447\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.706768 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ceph\") pod \"87617dd5-12a8-49cc-867a-aa0f2d0db447\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.706793 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-inventory\") pod \"87617dd5-12a8-49cc-867a-aa0f2d0db447\" (UID: \"87617dd5-12a8-49cc-867a-aa0f2d0db447\") " Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.712583 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ceph" (OuterVolumeSpecName: "ceph") pod "87617dd5-12a8-49cc-867a-aa0f2d0db447" (UID: "87617dd5-12a8-49cc-867a-aa0f2d0db447"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.712669 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87617dd5-12a8-49cc-867a-aa0f2d0db447-kube-api-access-hjhsv" (OuterVolumeSpecName: "kube-api-access-hjhsv") pod "87617dd5-12a8-49cc-867a-aa0f2d0db447" (UID: "87617dd5-12a8-49cc-867a-aa0f2d0db447"). InnerVolumeSpecName "kube-api-access-hjhsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.737753 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-inventory" (OuterVolumeSpecName: "inventory") pod "87617dd5-12a8-49cc-867a-aa0f2d0db447" (UID: "87617dd5-12a8-49cc-867a-aa0f2d0db447"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.739122 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "87617dd5-12a8-49cc-867a-aa0f2d0db447" (UID: "87617dd5-12a8-49cc-867a-aa0f2d0db447"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.809145 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.809402 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.809485 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjhsv\" (UniqueName: \"kubernetes.io/projected/87617dd5-12a8-49cc-867a-aa0f2d0db447-kube-api-access-hjhsv\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:58 crc kubenswrapper[4835]: I0202 17:25:58.809594 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/87617dd5-12a8-49cc-867a-aa0f2d0db447-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.193230 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.206015 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-7shcq" event={"ID":"87617dd5-12a8-49cc-867a-aa0f2d0db447","Type":"ContainerDied","Data":"df47c0ea97b24a0e70298e4578b9929a5a0755cd55a2886dc713570b7ce991af"} Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.206067 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df47c0ea97b24a0e70298e4578b9929a5a0755cd55a2886dc713570b7ce991af" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.312349 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm"] Feb 02 17:25:59 crc kubenswrapper[4835]: E0202 17:25:59.312792 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87617dd5-12a8-49cc-867a-aa0f2d0db447" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.312817 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="87617dd5-12a8-49cc-867a-aa0f2d0db447" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.313055 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="87617dd5-12a8-49cc-867a-aa0f2d0db447" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.313843 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.315911 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.316986 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.317198 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.318089 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.320698 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.327242 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm"] Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.420417 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.420503 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbth2\" (UniqueName: \"kubernetes.io/projected/ae2d5259-fd96-4127-8bf9-ddba82deadf6-kube-api-access-hbth2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.420854 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.421028 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.522799 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbth2\" (UniqueName: \"kubernetes.io/projected/ae2d5259-fd96-4127-8bf9-ddba82deadf6-kube-api-access-hbth2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: 
I0202 17:25:59.522970 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.523038 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.523091 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.526818 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.527242 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.528255 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.546151 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbth2\" (UniqueName: \"kubernetes.io/projected/ae2d5259-fd96-4127-8bf9-ddba82deadf6-kube-api-access-hbth2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:25:59 crc kubenswrapper[4835]: I0202 17:25:59.652638 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:26:00 crc kubenswrapper[4835]: I0202 17:26:00.164425 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm"] Feb 02 17:26:00 crc kubenswrapper[4835]: I0202 17:26:00.198117 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" event={"ID":"ae2d5259-fd96-4127-8bf9-ddba82deadf6","Type":"ContainerStarted","Data":"d8df77156cb1cabcf086f66a539273db23a05e5bacac2c7f8194103c53071edf"} Feb 02 17:26:01 crc kubenswrapper[4835]: I0202 17:26:01.207363 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" event={"ID":"ae2d5259-fd96-4127-8bf9-ddba82deadf6","Type":"ContainerStarted","Data":"08d4196a82fd13ae984fff3d646796d9021328aeb0bc472f47e3cc64356a0ef3"} Feb 02 17:26:01 crc kubenswrapper[4835]: I0202 17:26:01.230019 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" podStartSLOduration=1.76027719 podStartE2EDuration="2.229991274s" podCreationTimestamp="2026-02-02 17:25:59 +0000 UTC" firstStartedPulling="2026-02-02 17:26:00.173146355 +0000 UTC m=+2151.794750425" lastFinishedPulling="2026-02-02 17:26:00.642860419 +0000 UTC m=+2152.264464509" observedRunningTime="2026-02-02 17:26:01.226221688 +0000 UTC m=+2152.847825768" watchObservedRunningTime="2026-02-02 17:26:01.229991274 +0000 UTC m=+2152.851595394" Feb 02 17:26:05 crc kubenswrapper[4835]: I0202 17:26:05.240074 4835 generic.go:334] "Generic (PLEG): container finished" podID="ae2d5259-fd96-4127-8bf9-ddba82deadf6" containerID="08d4196a82fd13ae984fff3d646796d9021328aeb0bc472f47e3cc64356a0ef3" exitCode=0 Feb 02 17:26:05 crc kubenswrapper[4835]: I0202 17:26:05.240158 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" event={"ID":"ae2d5259-fd96-4127-8bf9-ddba82deadf6","Type":"ContainerDied","Data":"08d4196a82fd13ae984fff3d646796d9021328aeb0bc472f47e3cc64356a0ef3"} Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.715666 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.751446 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ceph\") pod \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.751544 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-inventory\") pod \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.751642 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbth2\" (UniqueName: \"kubernetes.io/projected/ae2d5259-fd96-4127-8bf9-ddba82deadf6-kube-api-access-hbth2\") pod \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.751733 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ssh-key-openstack-edpm-ipam\") pod \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\" (UID: \"ae2d5259-fd96-4127-8bf9-ddba82deadf6\") " Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.759546 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ceph" (OuterVolumeSpecName: "ceph") pod "ae2d5259-fd96-4127-8bf9-ddba82deadf6" (UID: "ae2d5259-fd96-4127-8bf9-ddba82deadf6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.771422 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae2d5259-fd96-4127-8bf9-ddba82deadf6-kube-api-access-hbth2" (OuterVolumeSpecName: "kube-api-access-hbth2") pod "ae2d5259-fd96-4127-8bf9-ddba82deadf6" (UID: "ae2d5259-fd96-4127-8bf9-ddba82deadf6"). InnerVolumeSpecName "kube-api-access-hbth2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.776247 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-inventory" (OuterVolumeSpecName: "inventory") pod "ae2d5259-fd96-4127-8bf9-ddba82deadf6" (UID: "ae2d5259-fd96-4127-8bf9-ddba82deadf6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.787166 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ae2d5259-fd96-4127-8bf9-ddba82deadf6" (UID: "ae2d5259-fd96-4127-8bf9-ddba82deadf6"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.853161 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.853199 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.853211 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae2d5259-fd96-4127-8bf9-ddba82deadf6-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:06 crc kubenswrapper[4835]: I0202 17:26:06.853225 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbth2\" (UniqueName: \"kubernetes.io/projected/ae2d5259-fd96-4127-8bf9-ddba82deadf6-kube-api-access-hbth2\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.258897 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" event={"ID":"ae2d5259-fd96-4127-8bf9-ddba82deadf6","Type":"ContainerDied","Data":"d8df77156cb1cabcf086f66a539273db23a05e5bacac2c7f8194103c53071edf"} Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.258934 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8df77156cb1cabcf086f66a539273db23a05e5bacac2c7f8194103c53071edf" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.258983 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.347325 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54"] Feb 02 17:26:07 crc kubenswrapper[4835]: E0202 17:26:07.347761 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae2d5259-fd96-4127-8bf9-ddba82deadf6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.347781 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae2d5259-fd96-4127-8bf9-ddba82deadf6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.347994 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae2d5259-fd96-4127-8bf9-ddba82deadf6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.348720 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.350577 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.351474 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.351620 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.351736 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.351743 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.358578 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54"] Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.463406 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.463607 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.463658 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgvbj\" (UniqueName: \"kubernetes.io/projected/c814521e-9a8e-41bd-8eb9-05990dbe267f-kube-api-access-dgvbj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.463731 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.565081 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.565201 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.565231 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgvbj\" (UniqueName: \"kubernetes.io/projected/c814521e-9a8e-41bd-8eb9-05990dbe267f-kube-api-access-dgvbj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.565294 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.570337 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.570338 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.579926 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.582697 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgvbj\" (UniqueName: \"kubernetes.io/projected/c814521e-9a8e-41bd-8eb9-05990dbe267f-kube-api-access-dgvbj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-4zj54\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:07 crc kubenswrapper[4835]: I0202 17:26:07.665858 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:08 crc kubenswrapper[4835]: I0202 17:26:08.160977 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54"] Feb 02 17:26:08 crc kubenswrapper[4835]: I0202 17:26:08.268106 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" event={"ID":"c814521e-9a8e-41bd-8eb9-05990dbe267f","Type":"ContainerStarted","Data":"8191309b4e48499f36ab3ef208d8b9d576540e8edb4d33ffcbe473ca3b32e2c2"} Feb 02 17:26:09 crc kubenswrapper[4835]: I0202 17:26:09.276814 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" event={"ID":"c814521e-9a8e-41bd-8eb9-05990dbe267f","Type":"ContainerStarted","Data":"f8e5b91fea3e8c92cf7a5017c5db31a3c06dc4cac128bbbcce99fb1f5d510b28"} Feb 02 17:26:09 crc kubenswrapper[4835]: I0202 17:26:09.298951 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" podStartSLOduration=1.661031537 podStartE2EDuration="2.298931642s" podCreationTimestamp="2026-02-02 17:26:07 +0000 UTC" firstStartedPulling="2026-02-02 17:26:08.164954915 +0000 UTC m=+2159.786558995" lastFinishedPulling="2026-02-02 17:26:08.80285498 +0000 UTC m=+2160.424459100" observedRunningTime="2026-02-02 17:26:09.291964925 +0000 UTC m=+2160.913569005" watchObservedRunningTime="2026-02-02 17:26:09.298931642 +0000 UTC m=+2160.920535732" Feb 02 17:26:14 crc kubenswrapper[4835]: I0202 17:26:14.870575 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:26:14 crc kubenswrapper[4835]: I0202 17:26:14.871105 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:26:41 crc kubenswrapper[4835]: I0202 17:26:41.541438 4835 generic.go:334] "Generic (PLEG): container finished" podID="c814521e-9a8e-41bd-8eb9-05990dbe267f" containerID="f8e5b91fea3e8c92cf7a5017c5db31a3c06dc4cac128bbbcce99fb1f5d510b28" exitCode=0 Feb 02 17:26:41 crc kubenswrapper[4835]: I0202 17:26:41.541530 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" event={"ID":"c814521e-9a8e-41bd-8eb9-05990dbe267f","Type":"ContainerDied","Data":"f8e5b91fea3e8c92cf7a5017c5db31a3c06dc4cac128bbbcce99fb1f5d510b28"} Feb 02 17:26:42 crc kubenswrapper[4835]: I0202 17:26:42.925639 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.085170 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ceph\") pod \"c814521e-9a8e-41bd-8eb9-05990dbe267f\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.085734 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-inventory\") pod \"c814521e-9a8e-41bd-8eb9-05990dbe267f\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.085849 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ssh-key-openstack-edpm-ipam\") pod \"c814521e-9a8e-41bd-8eb9-05990dbe267f\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.085900 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgvbj\" (UniqueName: \"kubernetes.io/projected/c814521e-9a8e-41bd-8eb9-05990dbe267f-kube-api-access-dgvbj\") pod \"c814521e-9a8e-41bd-8eb9-05990dbe267f\" (UID: \"c814521e-9a8e-41bd-8eb9-05990dbe267f\") " Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.095662 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c814521e-9a8e-41bd-8eb9-05990dbe267f-kube-api-access-dgvbj" (OuterVolumeSpecName: "kube-api-access-dgvbj") pod "c814521e-9a8e-41bd-8eb9-05990dbe267f" (UID: "c814521e-9a8e-41bd-8eb9-05990dbe267f"). InnerVolumeSpecName "kube-api-access-dgvbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.098392 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ceph" (OuterVolumeSpecName: "ceph") pod "c814521e-9a8e-41bd-8eb9-05990dbe267f" (UID: "c814521e-9a8e-41bd-8eb9-05990dbe267f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.116334 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-inventory" (OuterVolumeSpecName: "inventory") pod "c814521e-9a8e-41bd-8eb9-05990dbe267f" (UID: "c814521e-9a8e-41bd-8eb9-05990dbe267f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.134520 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "c814521e-9a8e-41bd-8eb9-05990dbe267f" (UID: "c814521e-9a8e-41bd-8eb9-05990dbe267f"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.188976 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.189076 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgvbj\" (UniqueName: \"kubernetes.io/projected/c814521e-9a8e-41bd-8eb9-05990dbe267f-kube-api-access-dgvbj\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.189103 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.189132 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c814521e-9a8e-41bd-8eb9-05990dbe267f-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.560489 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" event={"ID":"c814521e-9a8e-41bd-8eb9-05990dbe267f","Type":"ContainerDied","Data":"8191309b4e48499f36ab3ef208d8b9d576540e8edb4d33ffcbe473ca3b32e2c2"} Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.560541 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8191309b4e48499f36ab3ef208d8b9d576540e8edb4d33ffcbe473ca3b32e2c2" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.560581 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-4zj54" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.707458 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8"] Feb 02 17:26:43 crc kubenswrapper[4835]: E0202 17:26:43.707934 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c814521e-9a8e-41bd-8eb9-05990dbe267f" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.707967 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="c814521e-9a8e-41bd-8eb9-05990dbe267f" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.708256 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="c814521e-9a8e-41bd-8eb9-05990dbe267f" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.711167 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.713538 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.713814 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.714165 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.714346 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.714983 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.718729 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8"] Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.901393 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.901489 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ssh-key-openstack-edpm-ipam\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.901535 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sr97\" (UniqueName: \"kubernetes.io/projected/644fa065-6ba2-4813-84c8-c8f3d8da2971-kube-api-access-4sr97\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:43 crc kubenswrapper[4835]: I0202 17:26:43.901608 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.003200 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.003378 4835 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.003424 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ssh-key-openstack-edpm-ipam\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.003455 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sr97\" (UniqueName: \"kubernetes.io/projected/644fa065-6ba2-4813-84c8-c8f3d8da2971-kube-api-access-4sr97\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.007157 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.008342 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ssh-key-openstack-edpm-ipam\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.021011 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.023194 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sr97\" (UniqueName: \"kubernetes.io/projected/644fa065-6ba2-4813-84c8-c8f3d8da2971-kube-api-access-4sr97\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.033344 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.379601 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8"] Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.573048 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" event={"ID":"644fa065-6ba2-4813-84c8-c8f3d8da2971","Type":"ContainerStarted","Data":"87cbfcd8e8897af89bb53c8ce95dd38fee79ce824b7fbc90198bd273d31ab31f"} Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.870977 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.871480 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.871555 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.872749 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7b9b67ec0a70d873df2f1e945ad068f814ea600a71d949977865039aa50f1fc5"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:26:44 crc kubenswrapper[4835]: I0202 17:26:44.872884 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://7b9b67ec0a70d873df2f1e945ad068f814ea600a71d949977865039aa50f1fc5" gracePeriod=600 Feb 02 17:26:45 crc kubenswrapper[4835]: I0202 17:26:45.583933 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" event={"ID":"644fa065-6ba2-4813-84c8-c8f3d8da2971","Type":"ContainerStarted","Data":"2ed4a1dfb61436585cd3fd342dbbb389cb83634458fc270a0c7e9cd1a94bb3bb"} Feb 02 17:26:45 crc kubenswrapper[4835]: I0202 17:26:45.588425 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="7b9b67ec0a70d873df2f1e945ad068f814ea600a71d949977865039aa50f1fc5" exitCode=0 Feb 02 17:26:45 crc kubenswrapper[4835]: I0202 17:26:45.588480 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"7b9b67ec0a70d873df2f1e945ad068f814ea600a71d949977865039aa50f1fc5"} Feb 02 17:26:45 crc kubenswrapper[4835]: I0202 17:26:45.588511 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" 
event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b"} Feb 02 17:26:45 crc kubenswrapper[4835]: I0202 17:26:45.588532 4835 scope.go:117] "RemoveContainer" containerID="03be0f4feebb5946b23b18c07607838f81dd2d2a728bc2fc4d142be966f3ad94" Feb 02 17:26:45 crc kubenswrapper[4835]: I0202 17:26:45.613051 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" podStartSLOduration=1.959691453 podStartE2EDuration="2.613026476s" podCreationTimestamp="2026-02-02 17:26:43 +0000 UTC" firstStartedPulling="2026-02-02 17:26:44.39378623 +0000 UTC m=+2196.015390330" lastFinishedPulling="2026-02-02 17:26:45.047121273 +0000 UTC m=+2196.668725353" observedRunningTime="2026-02-02 17:26:45.606056878 +0000 UTC m=+2197.227660958" watchObservedRunningTime="2026-02-02 17:26:45.613026476 +0000 UTC m=+2197.234630576" Feb 02 17:26:48 crc kubenswrapper[4835]: E0202 17:26:48.941395 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod644fa065_6ba2_4813_84c8_c8f3d8da2971.slice/crio-2ed4a1dfb61436585cd3fd342dbbb389cb83634458fc270a0c7e9cd1a94bb3bb.scope\": RecentStats: unable to find data in memory cache]" Feb 02 17:26:49 crc kubenswrapper[4835]: I0202 17:26:49.626178 4835 generic.go:334] "Generic (PLEG): container finished" podID="644fa065-6ba2-4813-84c8-c8f3d8da2971" containerID="2ed4a1dfb61436585cd3fd342dbbb389cb83634458fc270a0c7e9cd1a94bb3bb" exitCode=0 Feb 02 17:26:49 crc kubenswrapper[4835]: I0202 17:26:49.626324 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" event={"ID":"644fa065-6ba2-4813-84c8-c8f3d8da2971","Type":"ContainerDied","Data":"2ed4a1dfb61436585cd3fd342dbbb389cb83634458fc270a0c7e9cd1a94bb3bb"} Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.051488 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.236776 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sr97\" (UniqueName: \"kubernetes.io/projected/644fa065-6ba2-4813-84c8-c8f3d8da2971-kube-api-access-4sr97\") pod \"644fa065-6ba2-4813-84c8-c8f3d8da2971\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.236843 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ceph\") pod \"644fa065-6ba2-4813-84c8-c8f3d8da2971\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.236976 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ssh-key-openstack-edpm-ipam\") pod \"644fa065-6ba2-4813-84c8-c8f3d8da2971\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.237085 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-inventory\") pod \"644fa065-6ba2-4813-84c8-c8f3d8da2971\" (UID: \"644fa065-6ba2-4813-84c8-c8f3d8da2971\") " Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.244157 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/644fa065-6ba2-4813-84c8-c8f3d8da2971-kube-api-access-4sr97" (OuterVolumeSpecName: "kube-api-access-4sr97") pod "644fa065-6ba2-4813-84c8-c8f3d8da2971" (UID: "644fa065-6ba2-4813-84c8-c8f3d8da2971"). InnerVolumeSpecName "kube-api-access-4sr97". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.244245 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ceph" (OuterVolumeSpecName: "ceph") pod "644fa065-6ba2-4813-84c8-c8f3d8da2971" (UID: "644fa065-6ba2-4813-84c8-c8f3d8da2971"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.266352 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-inventory" (OuterVolumeSpecName: "inventory") pod "644fa065-6ba2-4813-84c8-c8f3d8da2971" (UID: "644fa065-6ba2-4813-84c8-c8f3d8da2971"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.288751 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "644fa065-6ba2-4813-84c8-c8f3d8da2971" (UID: "644fa065-6ba2-4813-84c8-c8f3d8da2971"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.338549 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.338589 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.338598 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sr97\" (UniqueName: \"kubernetes.io/projected/644fa065-6ba2-4813-84c8-c8f3d8da2971-kube-api-access-4sr97\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.338606 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/644fa065-6ba2-4813-84c8-c8f3d8da2971-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.657370 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" event={"ID":"644fa065-6ba2-4813-84c8-c8f3d8da2971","Type":"ContainerDied","Data":"87cbfcd8e8897af89bb53c8ce95dd38fee79ce824b7fbc90198bd273d31ab31f"} Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.657408 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87cbfcd8e8897af89bb53c8ce95dd38fee79ce824b7fbc90198bd273d31ab31f" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.657534 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.741048 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p"] Feb 02 17:26:51 crc kubenswrapper[4835]: E0202 17:26:51.741533 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="644fa065-6ba2-4813-84c8-c8f3d8da2971" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.741561 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="644fa065-6ba2-4813-84c8-c8f3d8da2971" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.741813 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="644fa065-6ba2-4813-84c8-c8f3d8da2971" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.742744 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.744649 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.745655 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.745803 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.746890 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.751628 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p"] Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.751807 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.845171 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq89k\" (UniqueName: \"kubernetes.io/projected/1125f088-790d-4b32-831f-970cba6dc015-kube-api-access-tq89k\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.845241 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.845363 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.845387 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.946511 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.946666 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.946695 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.946750 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq89k\" (UniqueName: \"kubernetes.io/projected/1125f088-790d-4b32-831f-970cba6dc015-kube-api-access-tq89k\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.950331 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.950938 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.951178 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:51 crc kubenswrapper[4835]: I0202 17:26:51.978333 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq89k\" (UniqueName: \"kubernetes.io/projected/1125f088-790d-4b32-831f-970cba6dc015-kube-api-access-tq89k\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:52 crc kubenswrapper[4835]: I0202 17:26:52.105447 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:26:52 crc kubenswrapper[4835]: I0202 17:26:52.681642 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p"] Feb 02 17:26:53 crc kubenswrapper[4835]: I0202 17:26:53.672789 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" event={"ID":"1125f088-790d-4b32-831f-970cba6dc015","Type":"ContainerStarted","Data":"efb6f2ea17756d5c5e9477fa8f0a34095dcce1a14b6ccc966e5b6795f03bef61"} Feb 02 17:26:53 crc kubenswrapper[4835]: I0202 17:26:53.673132 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" event={"ID":"1125f088-790d-4b32-831f-970cba6dc015","Type":"ContainerStarted","Data":"cf57c3dc832cf55a6c53dc5752f129d2614d0c39b4cf8cd4705b9cb81a084775"} Feb 02 17:26:53 crc kubenswrapper[4835]: I0202 17:26:53.690447 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" podStartSLOduration=2.223078938 podStartE2EDuration="2.690429447s" podCreationTimestamp="2026-02-02 17:26:51 +0000 UTC" firstStartedPulling="2026-02-02 17:26:52.694597243 +0000 UTC m=+2204.316201323" lastFinishedPulling="2026-02-02 17:26:53.161947752 +0000 UTC m=+2204.783551832" observedRunningTime="2026-02-02 17:26:53.688188803 +0000 UTC m=+2205.309792893" watchObservedRunningTime="2026-02-02 17:26:53.690429447 +0000 UTC m=+2205.312033527" Feb 02 17:27:30 crc kubenswrapper[4835]: I0202 17:27:30.972191 4835 generic.go:334] "Generic (PLEG): container finished" podID="1125f088-790d-4b32-831f-970cba6dc015" containerID="efb6f2ea17756d5c5e9477fa8f0a34095dcce1a14b6ccc966e5b6795f03bef61" exitCode=0 Feb 02 17:27:30 crc kubenswrapper[4835]: I0202 17:27:30.972248 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" event={"ID":"1125f088-790d-4b32-831f-970cba6dc015","Type":"ContainerDied","Data":"efb6f2ea17756d5c5e9477fa8f0a34095dcce1a14b6ccc966e5b6795f03bef61"} Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.350135 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.405943 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ceph\") pod \"1125f088-790d-4b32-831f-970cba6dc015\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.406212 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ssh-key-openstack-edpm-ipam\") pod \"1125f088-790d-4b32-831f-970cba6dc015\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.406293 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq89k\" (UniqueName: \"kubernetes.io/projected/1125f088-790d-4b32-831f-970cba6dc015-kube-api-access-tq89k\") pod \"1125f088-790d-4b32-831f-970cba6dc015\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.406357 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-inventory\") pod \"1125f088-790d-4b32-831f-970cba6dc015\" (UID: \"1125f088-790d-4b32-831f-970cba6dc015\") " Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.413147 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1125f088-790d-4b32-831f-970cba6dc015-kube-api-access-tq89k" (OuterVolumeSpecName: "kube-api-access-tq89k") pod "1125f088-790d-4b32-831f-970cba6dc015" (UID: "1125f088-790d-4b32-831f-970cba6dc015"). InnerVolumeSpecName "kube-api-access-tq89k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.414035 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ceph" (OuterVolumeSpecName: "ceph") pod "1125f088-790d-4b32-831f-970cba6dc015" (UID: "1125f088-790d-4b32-831f-970cba6dc015"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.439130 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-inventory" (OuterVolumeSpecName: "inventory") pod "1125f088-790d-4b32-831f-970cba6dc015" (UID: "1125f088-790d-4b32-831f-970cba6dc015"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.456239 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1125f088-790d-4b32-831f-970cba6dc015" (UID: "1125f088-790d-4b32-831f-970cba6dc015"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.508880 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.509194 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq89k\" (UniqueName: \"kubernetes.io/projected/1125f088-790d-4b32-831f-970cba6dc015-kube-api-access-tq89k\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.509206 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.509218 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1125f088-790d-4b32-831f-970cba6dc015-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.990251 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" event={"ID":"1125f088-790d-4b32-831f-970cba6dc015","Type":"ContainerDied","Data":"cf57c3dc832cf55a6c53dc5752f129d2614d0c39b4cf8cd4705b9cb81a084775"} Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.990347 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf57c3dc832cf55a6c53dc5752f129d2614d0c39b4cf8cd4705b9cb81a084775" Feb 02 17:27:32 crc kubenswrapper[4835]: I0202 17:27:32.990376 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.093104 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jm7mm"] Feb 02 17:27:33 crc kubenswrapper[4835]: E0202 17:27:33.094106 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1125f088-790d-4b32-831f-970cba6dc015" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.094122 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1125f088-790d-4b32-831f-970cba6dc015" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.094320 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="1125f088-790d-4b32-831f-970cba6dc015" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.095759 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.098512 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.098582 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.098985 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.101424 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.101913 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.113625 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jm7mm"] Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.117981 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ceph\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.118039 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.118068 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.118125 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-868ms\" (UniqueName: \"kubernetes.io/projected/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-kube-api-access-868ms\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.220194 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-868ms\" (UniqueName: \"kubernetes.io/projected/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-kube-api-access-868ms\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.220451 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ceph\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: 
\"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.220502 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.220528 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.229303 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.229349 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ceph\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.236910 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.242897 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-868ms\" (UniqueName: \"kubernetes.io/projected/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-kube-api-access-868ms\") pod \"ssh-known-hosts-edpm-deployment-jm7mm\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.427831 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:33 crc kubenswrapper[4835]: I0202 17:27:33.992366 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jm7mm"] Feb 02 17:27:34 crc kubenswrapper[4835]: I0202 17:27:34.002411 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" event={"ID":"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2","Type":"ContainerStarted","Data":"4fd3644987a7bfbe387c8128e91d14a15fba119cdab8d811ebf0b60da4b09937"} Feb 02 17:27:35 crc kubenswrapper[4835]: I0202 17:27:35.013421 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" event={"ID":"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2","Type":"ContainerStarted","Data":"5c2253c1b7b9180c6034d5edbad6ca605a1e784099a3e4f0c803004cec602d51"} Feb 02 17:27:35 crc kubenswrapper[4835]: I0202 17:27:35.042353 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" podStartSLOduration=1.620121724 podStartE2EDuration="2.042332504s" podCreationTimestamp="2026-02-02 17:27:33 +0000 UTC" firstStartedPulling="2026-02-02 17:27:33.994155473 +0000 UTC m=+2245.615759593" lastFinishedPulling="2026-02-02 17:27:34.416366293 +0000 UTC m=+2246.037970373" observedRunningTime="2026-02-02 17:27:35.032439293 +0000 UTC m=+2246.654043393" watchObservedRunningTime="2026-02-02 17:27:35.042332504 +0000 UTC m=+2246.663936594" Feb 02 17:27:43 crc kubenswrapper[4835]: I0202 17:27:43.088859 4835 generic.go:334] "Generic (PLEG): container finished" podID="ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2" containerID="5c2253c1b7b9180c6034d5edbad6ca605a1e784099a3e4f0c803004cec602d51" exitCode=0 Feb 02 17:27:43 crc kubenswrapper[4835]: I0202 17:27:43.089014 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" event={"ID":"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2","Type":"ContainerDied","Data":"5c2253c1b7b9180c6034d5edbad6ca605a1e784099a3e4f0c803004cec602d51"} Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.560679 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.736533 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-inventory-0\") pod \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.736881 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ssh-key-openstack-edpm-ipam\") pod \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.736966 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-868ms\" (UniqueName: \"kubernetes.io/projected/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-kube-api-access-868ms\") pod \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.737093 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ceph\") pod \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\" (UID: \"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2\") " Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.742763 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ceph" (OuterVolumeSpecName: "ceph") pod "ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2" (UID: "ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.742849 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-kube-api-access-868ms" (OuterVolumeSpecName: "kube-api-access-868ms") pod "ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2" (UID: "ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2"). InnerVolumeSpecName "kube-api-access-868ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.767708 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2" (UID: "ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.767920 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2" (UID: "ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.839882 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-868ms\" (UniqueName: \"kubernetes.io/projected/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-kube-api-access-868ms\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.839920 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.839933 4835 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-inventory-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:44 crc kubenswrapper[4835]: I0202 17:27:44.839945 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.109805 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" event={"ID":"ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2","Type":"ContainerDied","Data":"4fd3644987a7bfbe387c8128e91d14a15fba119cdab8d811ebf0b60da4b09937"} Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.109844 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4fd3644987a7bfbe387c8128e91d14a15fba119cdab8d811ebf0b60da4b09937" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.109950 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jm7mm" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.177612 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h"] Feb 02 17:27:45 crc kubenswrapper[4835]: E0202 17:27:45.178046 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2" containerName="ssh-known-hosts-edpm-deployment" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.178070 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2" containerName="ssh-known-hosts-edpm-deployment" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.178381 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2" containerName="ssh-known-hosts-edpm-deployment" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.179132 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.182368 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.182575 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.182718 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.183067 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.183203 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.199057 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h"] Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.248548 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.248618 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.249009 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcvwp\" (UniqueName: \"kubernetes.io/projected/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-kube-api-access-rcvwp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.249060 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.350879 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.350928 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.351055 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcvwp\" (UniqueName: \"kubernetes.io/projected/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-kube-api-access-rcvwp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.351077 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.354332 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.356454 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.356668 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.373473 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcvwp\" (UniqueName: \"kubernetes.io/projected/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-kube-api-access-rcvwp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5227h\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:45 crc kubenswrapper[4835]: I0202 17:27:45.499488 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:46 crc kubenswrapper[4835]: I0202 17:27:46.040249 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h"] Feb 02 17:27:46 crc kubenswrapper[4835]: I0202 17:27:46.119607 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" event={"ID":"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed","Type":"ContainerStarted","Data":"804216028dd09525c6109327ca6992fe85f08ea0d5a9088be5a18fe469ad8587"} Feb 02 17:27:47 crc kubenswrapper[4835]: I0202 17:27:47.132567 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" event={"ID":"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed","Type":"ContainerStarted","Data":"499281a456aa4b8dad9ea84ab0c00a7df9ecb9524ad23b3ecaf36fdb8c584425"} Feb 02 17:27:47 crc kubenswrapper[4835]: I0202 17:27:47.161999 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" podStartSLOduration=1.613300008 podStartE2EDuration="2.161964676s" podCreationTimestamp="2026-02-02 17:27:45 +0000 UTC" firstStartedPulling="2026-02-02 17:27:46.041814244 +0000 UTC m=+2257.663418324" lastFinishedPulling="2026-02-02 17:27:46.590478892 +0000 UTC m=+2258.212082992" observedRunningTime="2026-02-02 17:27:47.159144696 +0000 UTC m=+2258.780748846" watchObservedRunningTime="2026-02-02 17:27:47.161964676 +0000 UTC m=+2258.783568796" Feb 02 17:27:54 crc kubenswrapper[4835]: I0202 17:27:54.193697 4835 generic.go:334] "Generic (PLEG): container finished" podID="c6aee2d4-013e-4ac6-a7f0-f5f640c724ed" containerID="499281a456aa4b8dad9ea84ab0c00a7df9ecb9524ad23b3ecaf36fdb8c584425" exitCode=0 Feb 02 17:27:54 crc kubenswrapper[4835]: I0202 17:27:54.193799 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" event={"ID":"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed","Type":"ContainerDied","Data":"499281a456aa4b8dad9ea84ab0c00a7df9ecb9524ad23b3ecaf36fdb8c584425"} Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.625430 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.749463 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcvwp\" (UniqueName: \"kubernetes.io/projected/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-kube-api-access-rcvwp\") pod \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.749508 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ssh-key-openstack-edpm-ipam\") pod \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.749611 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ceph\") pod \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.749631 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-inventory\") pod \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\" (UID: \"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed\") " Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.755791 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-kube-api-access-rcvwp" (OuterVolumeSpecName: "kube-api-access-rcvwp") pod "c6aee2d4-013e-4ac6-a7f0-f5f640c724ed" (UID: "c6aee2d4-013e-4ac6-a7f0-f5f640c724ed"). InnerVolumeSpecName "kube-api-access-rcvwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.755889 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ceph" (OuterVolumeSpecName: "ceph") pod "c6aee2d4-013e-4ac6-a7f0-f5f640c724ed" (UID: "c6aee2d4-013e-4ac6-a7f0-f5f640c724ed"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.778785 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-inventory" (OuterVolumeSpecName: "inventory") pod "c6aee2d4-013e-4ac6-a7f0-f5f640c724ed" (UID: "c6aee2d4-013e-4ac6-a7f0-f5f640c724ed"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.786014 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "c6aee2d4-013e-4ac6-a7f0-f5f640c724ed" (UID: "c6aee2d4-013e-4ac6-a7f0-f5f640c724ed"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.851637 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.851671 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.851684 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcvwp\" (UniqueName: \"kubernetes.io/projected/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-kube-api-access-rcvwp\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:55 crc kubenswrapper[4835]: I0202 17:27:55.851693 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/c6aee2d4-013e-4ac6-a7f0-f5f640c724ed-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.214296 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" event={"ID":"c6aee2d4-013e-4ac6-a7f0-f5f640c724ed","Type":"ContainerDied","Data":"804216028dd09525c6109327ca6992fe85f08ea0d5a9088be5a18fe469ad8587"} Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.214344 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="804216028dd09525c6109327ca6992fe85f08ea0d5a9088be5a18fe469ad8587" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.214362 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5227h" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.323333 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj"] Feb 02 17:27:56 crc kubenswrapper[4835]: E0202 17:27:56.324266 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6aee2d4-013e-4ac6-a7f0-f5f640c724ed" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.324314 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6aee2d4-013e-4ac6-a7f0-f5f640c724ed" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.325011 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6aee2d4-013e-4ac6-a7f0-f5f640c724ed" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.326340 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.332178 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.335421 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.335706 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.336129 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.337630 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.355094 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj"] Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.463111 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.463351 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.463466 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.463568 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wglbl\" (UniqueName: \"kubernetes.io/projected/7f86d2bc-c7cf-42c8-b62a-828961f9e880-kube-api-access-wglbl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.565463 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.565657 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.565759 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.565847 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wglbl\" (UniqueName: \"kubernetes.io/projected/7f86d2bc-c7cf-42c8-b62a-828961f9e880-kube-api-access-wglbl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.570300 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.572460 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.573372 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.587949 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wglbl\" (UniqueName: \"kubernetes.io/projected/7f86d2bc-c7cf-42c8-b62a-828961f9e880-kube-api-access-wglbl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:56 crc kubenswrapper[4835]: I0202 17:27:56.662091 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:27:57 crc kubenswrapper[4835]: I0202 17:27:57.167376 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj"] Feb 02 17:27:57 crc kubenswrapper[4835]: I0202 17:27:57.223179 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" event={"ID":"7f86d2bc-c7cf-42c8-b62a-828961f9e880","Type":"ContainerStarted","Data":"0270e9349afe78d28cd98cc36d76a67e47167fc92516555510146f82451d6edd"} Feb 02 17:27:58 crc kubenswrapper[4835]: I0202 17:27:58.241554 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" event={"ID":"7f86d2bc-c7cf-42c8-b62a-828961f9e880","Type":"ContainerStarted","Data":"4084d751b2d223bcfc5ff562b271549099c0abe3af8308aad2bd3bd4f6539f61"} Feb 02 17:27:58 crc kubenswrapper[4835]: I0202 17:27:58.266656 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" podStartSLOduration=1.7203269410000002 podStartE2EDuration="2.266640631s" podCreationTimestamp="2026-02-02 17:27:56 +0000 UTC" firstStartedPulling="2026-02-02 17:27:57.177436338 +0000 UTC m=+2268.799040418" lastFinishedPulling="2026-02-02 17:27:57.723750008 +0000 UTC m=+2269.345354108" observedRunningTime="2026-02-02 17:27:58.260381764 +0000 UTC m=+2269.881985844" watchObservedRunningTime="2026-02-02 17:27:58.266640631 +0000 UTC m=+2269.888244711" Feb 02 17:28:07 crc kubenswrapper[4835]: I0202 17:28:07.322004 4835 generic.go:334] "Generic (PLEG): container finished" podID="7f86d2bc-c7cf-42c8-b62a-828961f9e880" containerID="4084d751b2d223bcfc5ff562b271549099c0abe3af8308aad2bd3bd4f6539f61" exitCode=0 Feb 02 17:28:07 crc kubenswrapper[4835]: I0202 17:28:07.322094 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" event={"ID":"7f86d2bc-c7cf-42c8-b62a-828961f9e880","Type":"ContainerDied","Data":"4084d751b2d223bcfc5ff562b271549099c0abe3af8308aad2bd3bd4f6539f61"} Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.689139 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.790683 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ceph\") pod \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.790733 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wglbl\" (UniqueName: \"kubernetes.io/projected/7f86d2bc-c7cf-42c8-b62a-828961f9e880-kube-api-access-wglbl\") pod \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.790910 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ssh-key-openstack-edpm-ipam\") pod \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.791037 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-inventory\") pod \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\" (UID: \"7f86d2bc-c7cf-42c8-b62a-828961f9e880\") " Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.795918 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ceph" (OuterVolumeSpecName: "ceph") pod "7f86d2bc-c7cf-42c8-b62a-828961f9e880" (UID: "7f86d2bc-c7cf-42c8-b62a-828961f9e880"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.797405 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f86d2bc-c7cf-42c8-b62a-828961f9e880-kube-api-access-wglbl" (OuterVolumeSpecName: "kube-api-access-wglbl") pod "7f86d2bc-c7cf-42c8-b62a-828961f9e880" (UID: "7f86d2bc-c7cf-42c8-b62a-828961f9e880"). InnerVolumeSpecName "kube-api-access-wglbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.820026 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7f86d2bc-c7cf-42c8-b62a-828961f9e880" (UID: "7f86d2bc-c7cf-42c8-b62a-828961f9e880"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.823113 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-inventory" (OuterVolumeSpecName: "inventory") pod "7f86d2bc-c7cf-42c8-b62a-828961f9e880" (UID: "7f86d2bc-c7cf-42c8-b62a-828961f9e880"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.894110 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.894365 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.894375 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7f86d2bc-c7cf-42c8-b62a-828961f9e880-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:08 crc kubenswrapper[4835]: I0202 17:28:08.894383 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wglbl\" (UniqueName: \"kubernetes.io/projected/7f86d2bc-c7cf-42c8-b62a-828961f9e880-kube-api-access-wglbl\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.348870 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" event={"ID":"7f86d2bc-c7cf-42c8-b62a-828961f9e880","Type":"ContainerDied","Data":"0270e9349afe78d28cd98cc36d76a67e47167fc92516555510146f82451d6edd"} Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.348916 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0270e9349afe78d28cd98cc36d76a67e47167fc92516555510146f82451d6edd" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.348973 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.480256 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz"] Feb 02 17:28:09 crc kubenswrapper[4835]: E0202 17:28:09.480839 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f86d2bc-c7cf-42c8-b62a-828961f9e880" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.480869 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f86d2bc-c7cf-42c8-b62a-828961f9e880" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.481143 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f86d2bc-c7cf-42c8-b62a-828961f9e880" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.482081 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.485005 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.485256 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.485766 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.486366 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.486614 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.487620 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.487913 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.489778 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.491874 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz"] Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.606156 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.606440 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.606585 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.606704 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-bootstrap-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.606832 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wtl7\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-kube-api-access-8wtl7\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.606976 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.607092 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.607203 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.607390 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.607515 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.607647 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.607713 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.607838 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710038 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710108 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710129 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710151 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710170 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710204 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-8wtl7\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-kube-api-access-8wtl7\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710258 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710301 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710330 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710377 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710414 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710435 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.710490 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.716244 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.716745 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.717200 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.717948 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.718045 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.718175 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.718898 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.719744 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.720661 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.721193 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.722544 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.723153 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.735238 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wtl7\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-kube-api-access-8wtl7\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:09 crc kubenswrapper[4835]: I0202 17:28:09.806627 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:10 crc kubenswrapper[4835]: I0202 17:28:10.351777 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz"] Feb 02 17:28:10 crc kubenswrapper[4835]: I0202 17:28:10.356013 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:28:10 crc kubenswrapper[4835]: E0202 17:28:10.810147 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f86d2bc_c7cf_42c8_b62a_828961f9e880.slice\": RecentStats: unable to find data in memory cache]" Feb 02 17:28:11 crc kubenswrapper[4835]: I0202 17:28:11.363481 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" event={"ID":"58ba2cea-000b-458c-bb8f-c3f693512a30","Type":"ContainerStarted","Data":"037f33f79621958c3c0a895b914b13f406bee5be713eb0022b14ffc94ed85384"} Feb 02 17:28:11 crc kubenswrapper[4835]: I0202 17:28:11.363855 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" event={"ID":"58ba2cea-000b-458c-bb8f-c3f693512a30","Type":"ContainerStarted","Data":"9799d2ce3d23e25a77698eed33f4fe5a612ce7bbc936118cf5a45596d6eb8c86"} Feb 02 17:28:11 crc kubenswrapper[4835]: I0202 17:28:11.382988 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" podStartSLOduration=1.890249404 podStartE2EDuration="2.382971244s" podCreationTimestamp="2026-02-02 17:28:09 +0000 UTC" firstStartedPulling="2026-02-02 17:28:10.355837181 +0000 UTC m=+2281.977441261" lastFinishedPulling="2026-02-02 17:28:10.848559021 +0000 UTC m=+2282.470163101" observedRunningTime="2026-02-02 17:28:11.381369869 +0000 UTC m=+2283.002973939" watchObservedRunningTime="2026-02-02 17:28:11.382971244 +0000 UTC m=+2283.004575324" Feb 02 17:28:21 crc kubenswrapper[4835]: E0202 17:28:21.038089 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f86d2bc_c7cf_42c8_b62a_828961f9e880.slice\": RecentStats: unable to find data in memory cache]" Feb 02 17:28:31 crc kubenswrapper[4835]: E0202 17:28:31.271151 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f86d2bc_c7cf_42c8_b62a_828961f9e880.slice\": RecentStats: unable to find data in memory cache]" Feb 02 17:28:38 crc kubenswrapper[4835]: I0202 17:28:38.606071 4835 generic.go:334] "Generic (PLEG): container finished" podID="58ba2cea-000b-458c-bb8f-c3f693512a30" containerID="037f33f79621958c3c0a895b914b13f406bee5be713eb0022b14ffc94ed85384" exitCode=0 Feb 02 17:28:38 crc kubenswrapper[4835]: I0202 17:28:38.606141 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" event={"ID":"58ba2cea-000b-458c-bb8f-c3f693512a30","Type":"ContainerDied","Data":"037f33f79621958c3c0a895b914b13f406bee5be713eb0022b14ffc94ed85384"} Feb 02 17:28:39 crc kubenswrapper[4835]: I0202 17:28:39.958689 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060315 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-repo-setup-combined-ca-bundle\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060439 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060485 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-inventory\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060526 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-libvirt-combined-ca-bundle\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060586 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-nova-combined-ca-bundle\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060626 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-ovn-default-certs-0\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060670 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-bootstrap-combined-ca-bundle\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060718 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ovn-combined-ca-bundle\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060841 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ssh-key-openstack-edpm-ipam\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc 
kubenswrapper[4835]: I0202 17:28:40.060872 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060896 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-neutron-metadata-combined-ca-bundle\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060923 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ceph\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.060981 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wtl7\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-kube-api-access-8wtl7\") pod \"58ba2cea-000b-458c-bb8f-c3f693512a30\" (UID: \"58ba2cea-000b-458c-bb8f-c3f693512a30\") " Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.068857 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ceph" (OuterVolumeSpecName: "ceph") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.070595 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.070618 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.070700 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.070814 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.071361 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.071843 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.072252 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.072275 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-kube-api-access-8wtl7" (OuterVolumeSpecName: "kube-api-access-8wtl7") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "kube-api-access-8wtl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.073756 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.096379 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.097685 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-inventory" (OuterVolumeSpecName: "inventory") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.101114 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "58ba2cea-000b-458c-bb8f-c3f693512a30" (UID: "58ba2cea-000b-458c-bb8f-c3f693512a30"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162832 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162872 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162886 4835 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162901 4835 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162918 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162932 4835 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162945 4835 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162956 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162969 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162983 4835 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.162996 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.163007 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wtl7\" (UniqueName: \"kubernetes.io/projected/58ba2cea-000b-458c-bb8f-c3f693512a30-kube-api-access-8wtl7\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.163020 4835 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ba2cea-000b-458c-bb8f-c3f693512a30-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.623435 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" event={"ID":"58ba2cea-000b-458c-bb8f-c3f693512a30","Type":"ContainerDied","Data":"9799d2ce3d23e25a77698eed33f4fe5a612ce7bbc936118cf5a45596d6eb8c86"} Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.623488 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9799d2ce3d23e25a77698eed33f4fe5a612ce7bbc936118cf5a45596d6eb8c86" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.623858 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.725015 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7"] Feb 02 17:28:40 crc kubenswrapper[4835]: E0202 17:28:40.725793 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58ba2cea-000b-458c-bb8f-c3f693512a30" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.725813 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="58ba2cea-000b-458c-bb8f-c3f693512a30" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.726130 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="58ba2cea-000b-458c-bb8f-c3f693512a30" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.727043 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.732476 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.735069 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.735667 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.735921 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.736114 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.774090 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.774164 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.774209 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpj5c\" (UniqueName: \"kubernetes.io/projected/7703d310-723f-40a8-bae2-d11570ea275b-kube-api-access-mpj5c\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.774241 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ssh-key-openstack-edpm-ipam\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.776183 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7"] Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.875442 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ssh-key-openstack-edpm-ipam\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.875620 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.875668 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.875708 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpj5c\" (UniqueName: \"kubernetes.io/projected/7703d310-723f-40a8-bae2-d11570ea275b-kube-api-access-mpj5c\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.879216 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ssh-key-openstack-edpm-ipam\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.879220 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.879504 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:40 crc kubenswrapper[4835]: I0202 17:28:40.895617 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpj5c\" (UniqueName: \"kubernetes.io/projected/7703d310-723f-40a8-bae2-d11570ea275b-kube-api-access-mpj5c\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:41 crc kubenswrapper[4835]: I0202 17:28:41.052708 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:41 crc kubenswrapper[4835]: E0202 17:28:41.482797 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f86d2bc_c7cf_42c8_b62a_828961f9e880.slice\": RecentStats: unable to find data in memory cache]" Feb 02 17:28:41 crc kubenswrapper[4835]: I0202 17:28:41.546014 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7"] Feb 02 17:28:41 crc kubenswrapper[4835]: I0202 17:28:41.632779 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" event={"ID":"7703d310-723f-40a8-bae2-d11570ea275b","Type":"ContainerStarted","Data":"7efb49155748fe72d73cd5a951f8257bcb7dba47ebad08e6d4fbb1867a06b97f"} Feb 02 17:28:42 crc kubenswrapper[4835]: I0202 17:28:42.640852 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" event={"ID":"7703d310-723f-40a8-bae2-d11570ea275b","Type":"ContainerStarted","Data":"8b0074288528096710466695f891fb808f621a32d2295884a3bed5f94852f7b1"} Feb 02 17:28:42 crc kubenswrapper[4835]: I0202 17:28:42.664324 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" podStartSLOduration=2.220746621 podStartE2EDuration="2.664305736s" podCreationTimestamp="2026-02-02 17:28:40 +0000 UTC" firstStartedPulling="2026-02-02 17:28:41.547242271 +0000 UTC m=+2313.168846351" lastFinishedPulling="2026-02-02 17:28:41.990801386 +0000 UTC m=+2313.612405466" observedRunningTime="2026-02-02 17:28:42.65635533 +0000 UTC m=+2314.277959410" watchObservedRunningTime="2026-02-02 17:28:42.664305736 +0000 UTC m=+2314.285909816" Feb 02 17:28:47 crc kubenswrapper[4835]: I0202 17:28:47.688650 4835 generic.go:334] "Generic (PLEG): container finished" podID="7703d310-723f-40a8-bae2-d11570ea275b" containerID="8b0074288528096710466695f891fb808f621a32d2295884a3bed5f94852f7b1" exitCode=0 Feb 02 17:28:47 crc kubenswrapper[4835]: I0202 17:28:47.688737 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" event={"ID":"7703d310-723f-40a8-bae2-d11570ea275b","Type":"ContainerDied","Data":"8b0074288528096710466695f891fb808f621a32d2295884a3bed5f94852f7b1"} Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.116144 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.214777 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpj5c\" (UniqueName: \"kubernetes.io/projected/7703d310-723f-40a8-bae2-d11570ea275b-kube-api-access-mpj5c\") pod \"7703d310-723f-40a8-bae2-d11570ea275b\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.215069 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-inventory\") pod \"7703d310-723f-40a8-bae2-d11570ea275b\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.215143 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ssh-key-openstack-edpm-ipam\") pod \"7703d310-723f-40a8-bae2-d11570ea275b\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.215238 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ceph\") pod \"7703d310-723f-40a8-bae2-d11570ea275b\" (UID: \"7703d310-723f-40a8-bae2-d11570ea275b\") " Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.221455 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ceph" (OuterVolumeSpecName: "ceph") pod "7703d310-723f-40a8-bae2-d11570ea275b" (UID: "7703d310-723f-40a8-bae2-d11570ea275b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.223917 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7703d310-723f-40a8-bae2-d11570ea275b-kube-api-access-mpj5c" (OuterVolumeSpecName: "kube-api-access-mpj5c") pod "7703d310-723f-40a8-bae2-d11570ea275b" (UID: "7703d310-723f-40a8-bae2-d11570ea275b"). InnerVolumeSpecName "kube-api-access-mpj5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.248927 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7703d310-723f-40a8-bae2-d11570ea275b" (UID: "7703d310-723f-40a8-bae2-d11570ea275b"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.263177 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-inventory" (OuterVolumeSpecName: "inventory") pod "7703d310-723f-40a8-bae2-d11570ea275b" (UID: "7703d310-723f-40a8-bae2-d11570ea275b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.317176 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpj5c\" (UniqueName: \"kubernetes.io/projected/7703d310-723f-40a8-bae2-d11570ea275b-kube-api-access-mpj5c\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.317215 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.317224 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.317233 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7703d310-723f-40a8-bae2-d11570ea275b-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.717176 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" event={"ID":"7703d310-723f-40a8-bae2-d11570ea275b","Type":"ContainerDied","Data":"7efb49155748fe72d73cd5a951f8257bcb7dba47ebad08e6d4fbb1867a06b97f"} Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.717538 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7efb49155748fe72d73cd5a951f8257bcb7dba47ebad08e6d4fbb1867a06b97f" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.717659 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.813027 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5"] Feb 02 17:28:49 crc kubenswrapper[4835]: E0202 17:28:49.813467 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7703d310-723f-40a8-bae2-d11570ea275b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.813493 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="7703d310-723f-40a8-bae2-d11570ea275b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.813685 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="7703d310-723f-40a8-bae2-d11570ea275b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.814476 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.818760 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.818916 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.818948 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.818980 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.819029 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.819133 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.857256 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5"] Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.939327 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.939417 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.939449 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.939486 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:49 crc kubenswrapper[4835]: I0202 17:28:49.939854 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njrfk\" (UniqueName: \"kubernetes.io/projected/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-kube-api-access-njrfk\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:49 crc 
kubenswrapper[4835]: I0202 17:28:49.940059 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.041517 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.041575 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.041596 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.041624 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.041703 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njrfk\" (UniqueName: \"kubernetes.io/projected/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-kube-api-access-njrfk\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.041747 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.042789 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.046581 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.049220 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.050875 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.058239 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.064346 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njrfk\" (UniqueName: \"kubernetes.io/projected/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-kube-api-access-njrfk\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5f2k5\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.137651 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.660159 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5"] Feb 02 17:28:50 crc kubenswrapper[4835]: W0202 17:28:50.667385 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51a0827f_7d93_4cd6_b0e3_aa16bdb6dbf2.slice/crio-eb96514131e9e34c78cb3d48c1ef84a0d95470c947e3e4d91e69c92ee92e324c WatchSource:0}: Error finding container eb96514131e9e34c78cb3d48c1ef84a0d95470c947e3e4d91e69c92ee92e324c: Status 404 returned error can't find the container with id eb96514131e9e34c78cb3d48c1ef84a0d95470c947e3e4d91e69c92ee92e324c Feb 02 17:28:50 crc kubenswrapper[4835]: I0202 17:28:50.728057 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" event={"ID":"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2","Type":"ContainerStarted","Data":"eb96514131e9e34c78cb3d48c1ef84a0d95470c947e3e4d91e69c92ee92e324c"} Feb 02 17:28:51 crc kubenswrapper[4835]: E0202 17:28:51.712922 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f86d2bc_c7cf_42c8_b62a_828961f9e880.slice\": RecentStats: unable to find data in memory cache]" Feb 02 17:28:51 crc kubenswrapper[4835]: I0202 17:28:51.739346 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" event={"ID":"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2","Type":"ContainerStarted","Data":"0a3597f4f55ffd228b3845fb5f30d3270de44187ab31b4f9f4cf7859997cfcc0"} Feb 02 17:28:51 crc kubenswrapper[4835]: I0202 17:28:51.763814 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" podStartSLOduration=2.085137882 podStartE2EDuration="2.763795797s" podCreationTimestamp="2026-02-02 17:28:49 +0000 UTC" firstStartedPulling="2026-02-02 17:28:50.669094877 +0000 UTC m=+2322.290698947" lastFinishedPulling="2026-02-02 17:28:51.347752792 +0000 UTC m=+2322.969356862" observedRunningTime="2026-02-02 17:28:51.760944386 +0000 UTC m=+2323.382548476" watchObservedRunningTime="2026-02-02 17:28:51.763795797 +0000 UTC m=+2323.385399877" Feb 02 17:29:01 crc kubenswrapper[4835]: E0202 17:29:01.917187 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f86d2bc_c7cf_42c8_b62a_828961f9e880.slice\": RecentStats: unable to find data in memory cache]" Feb 02 17:29:14 crc kubenswrapper[4835]: I0202 17:29:14.870844 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:29:14 crc kubenswrapper[4835]: I0202 17:29:14.871183 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 
17:29:44 crc kubenswrapper[4835]: I0202 17:29:44.870041 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:29:44 crc kubenswrapper[4835]: I0202 17:29:44.870631 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:29:52 crc kubenswrapper[4835]: I0202 17:29:52.278660 4835 generic.go:334] "Generic (PLEG): container finished" podID="51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" containerID="0a3597f4f55ffd228b3845fb5f30d3270de44187ab31b4f9f4cf7859997cfcc0" exitCode=0 Feb 02 17:29:52 crc kubenswrapper[4835]: I0202 17:29:52.278791 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" event={"ID":"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2","Type":"ContainerDied","Data":"0a3597f4f55ffd228b3845fb5f30d3270de44187ab31b4f9f4cf7859997cfcc0"} Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.691538 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.779123 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-inventory\") pod \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.779336 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovn-combined-ca-bundle\") pod \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.780259 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ssh-key-openstack-edpm-ipam\") pod \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.780321 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njrfk\" (UniqueName: \"kubernetes.io/projected/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-kube-api-access-njrfk\") pod \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.780467 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ceph\") pod \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.780703 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: 
\"kubernetes.io/configmap/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovncontroller-config-0\") pod \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\" (UID: \"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2\") " Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.785938 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" (UID: "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.786008 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ceph" (OuterVolumeSpecName: "ceph") pod "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" (UID: "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.786513 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-kube-api-access-njrfk" (OuterVolumeSpecName: "kube-api-access-njrfk") pod "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" (UID: "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2"). InnerVolumeSpecName "kube-api-access-njrfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.802851 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" (UID: "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.813654 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-inventory" (OuterVolumeSpecName: "inventory") pod "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" (UID: "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.814039 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" (UID: "51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.883952 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.883981 4835 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.883991 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.884000 4835 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.884011 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:29:53 crc kubenswrapper[4835]: I0202 17:29:53.884020 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njrfk\" (UniqueName: \"kubernetes.io/projected/51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2-kube-api-access-njrfk\") on node \"crc\" DevicePath \"\"" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.302655 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" event={"ID":"51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2","Type":"ContainerDied","Data":"eb96514131e9e34c78cb3d48c1ef84a0d95470c947e3e4d91e69c92ee92e324c"} Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.302685 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5f2k5" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.302714 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb96514131e9e34c78cb3d48c1ef84a0d95470c947e3e4d91e69c92ee92e324c" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.402463 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx"] Feb 02 17:29:54 crc kubenswrapper[4835]: E0202 17:29:54.403339 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.403422 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.403691 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.404337 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.406730 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.406906 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.407023 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.407587 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.407823 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.407882 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.408155 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.413332 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx"] Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.497724 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.497787 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x9sm\" (UniqueName: \"kubernetes.io/projected/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-kube-api-access-7x9sm\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.497842 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.497869 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.497884 4835 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.497925 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.497954 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.599434 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.599483 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.599505 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.599548 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.599577 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-metadata-combined-ca-bundle\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.599632 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.599669 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x9sm\" (UniqueName: \"kubernetes.io/projected/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-kube-api-access-7x9sm\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.604108 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.604262 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.604781 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.606028 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.609426 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.617731 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.629221 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x9sm\" (UniqueName: \"kubernetes.io/projected/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-kube-api-access-7x9sm\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:54 crc kubenswrapper[4835]: I0202 17:29:54.720906 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:29:55 crc kubenswrapper[4835]: I0202 17:29:55.247730 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx"] Feb 02 17:29:55 crc kubenswrapper[4835]: I0202 17:29:55.311633 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" event={"ID":"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5","Type":"ContainerStarted","Data":"8782b7076a50cba8768033125176673d714c22997119301d4ed7a7355d3a34a4"} Feb 02 17:29:56 crc kubenswrapper[4835]: I0202 17:29:56.320521 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" event={"ID":"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5","Type":"ContainerStarted","Data":"d187f3f996c067c7ade91a204550c88114258af0aed274c01ac0c6c35108a0ce"} Feb 02 17:29:56 crc kubenswrapper[4835]: I0202 17:29:56.348119 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" podStartSLOduration=1.83066631 podStartE2EDuration="2.34809812s" podCreationTimestamp="2026-02-02 17:29:54 +0000 UTC" firstStartedPulling="2026-02-02 17:29:55.257708293 +0000 UTC m=+2386.879312383" lastFinishedPulling="2026-02-02 17:29:55.775140103 +0000 UTC m=+2387.396744193" observedRunningTime="2026-02-02 17:29:56.346144235 +0000 UTC m=+2387.967748375" watchObservedRunningTime="2026-02-02 17:29:56.34809812 +0000 UTC m=+2387.969702210" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.138730 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr"] Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.141015 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.143019 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.143220 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.158250 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr"] Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.204541 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8593a4fc-619d-4fce-810a-252425f1629c-config-volume\") pod \"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.204845 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8593a4fc-619d-4fce-810a-252425f1629c-secret-volume\") pod \"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.205082 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg76w\" (UniqueName: \"kubernetes.io/projected/8593a4fc-619d-4fce-810a-252425f1629c-kube-api-access-xg76w\") pod \"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.306423 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8593a4fc-619d-4fce-810a-252425f1629c-config-volume\") pod \"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.306465 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8593a4fc-619d-4fce-810a-252425f1629c-secret-volume\") pod \"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.306550 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg76w\" (UniqueName: \"kubernetes.io/projected/8593a4fc-619d-4fce-810a-252425f1629c-kube-api-access-xg76w\") pod \"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.307360 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8593a4fc-619d-4fce-810a-252425f1629c-config-volume\") pod 
\"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.334137 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8593a4fc-619d-4fce-810a-252425f1629c-secret-volume\") pod \"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.334163 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg76w\" (UniqueName: \"kubernetes.io/projected/8593a4fc-619d-4fce-810a-252425f1629c-kube-api-access-xg76w\") pod \"collect-profiles-29500890-wzxfr\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.467460 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:00 crc kubenswrapper[4835]: I0202 17:30:00.892551 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr"] Feb 02 17:30:00 crc kubenswrapper[4835]: W0202 17:30:00.898973 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8593a4fc_619d_4fce_810a_252425f1629c.slice/crio-0bdb83a4642063384966ad1d121dc4b6e9774726e690595acfdb60a20f821dad WatchSource:0}: Error finding container 0bdb83a4642063384966ad1d121dc4b6e9774726e690595acfdb60a20f821dad: Status 404 returned error can't find the container with id 0bdb83a4642063384966ad1d121dc4b6e9774726e690595acfdb60a20f821dad Feb 02 17:30:01 crc kubenswrapper[4835]: I0202 17:30:01.364046 4835 generic.go:334] "Generic (PLEG): container finished" podID="8593a4fc-619d-4fce-810a-252425f1629c" containerID="c6dbeef34c7bdf76c73715f62c3cb20da06bd3cd13e02afd60d0e37199aa80b9" exitCode=0 Feb 02 17:30:01 crc kubenswrapper[4835]: I0202 17:30:01.364099 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" event={"ID":"8593a4fc-619d-4fce-810a-252425f1629c","Type":"ContainerDied","Data":"c6dbeef34c7bdf76c73715f62c3cb20da06bd3cd13e02afd60d0e37199aa80b9"} Feb 02 17:30:01 crc kubenswrapper[4835]: I0202 17:30:01.364378 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" event={"ID":"8593a4fc-619d-4fce-810a-252425f1629c","Type":"ContainerStarted","Data":"0bdb83a4642063384966ad1d121dc4b6e9774726e690595acfdb60a20f821dad"} Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.768025 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.852347 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8593a4fc-619d-4fce-810a-252425f1629c-config-volume\") pod \"8593a4fc-619d-4fce-810a-252425f1629c\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.852872 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8593a4fc-619d-4fce-810a-252425f1629c-secret-volume\") pod \"8593a4fc-619d-4fce-810a-252425f1629c\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.852928 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg76w\" (UniqueName: \"kubernetes.io/projected/8593a4fc-619d-4fce-810a-252425f1629c-kube-api-access-xg76w\") pod \"8593a4fc-619d-4fce-810a-252425f1629c\" (UID: \"8593a4fc-619d-4fce-810a-252425f1629c\") " Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.853206 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8593a4fc-619d-4fce-810a-252425f1629c-config-volume" (OuterVolumeSpecName: "config-volume") pod "8593a4fc-619d-4fce-810a-252425f1629c" (UID: "8593a4fc-619d-4fce-810a-252425f1629c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.853499 4835 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8593a4fc-619d-4fce-810a-252425f1629c-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.858777 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8593a4fc-619d-4fce-810a-252425f1629c-kube-api-access-xg76w" (OuterVolumeSpecName: "kube-api-access-xg76w") pod "8593a4fc-619d-4fce-810a-252425f1629c" (UID: "8593a4fc-619d-4fce-810a-252425f1629c"). InnerVolumeSpecName "kube-api-access-xg76w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.858800 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8593a4fc-619d-4fce-810a-252425f1629c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8593a4fc-619d-4fce-810a-252425f1629c" (UID: "8593a4fc-619d-4fce-810a-252425f1629c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.954305 4835 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8593a4fc-619d-4fce-810a-252425f1629c-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:02 crc kubenswrapper[4835]: I0202 17:30:02.954339 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg76w\" (UniqueName: \"kubernetes.io/projected/8593a4fc-619d-4fce-810a-252425f1629c-kube-api-access-xg76w\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:03 crc kubenswrapper[4835]: E0202 17:30:03.367004 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8593a4fc_619d_4fce_810a_252425f1629c.slice\": RecentStats: unable to find data in memory cache]" Feb 02 17:30:03 crc kubenswrapper[4835]: I0202 17:30:03.392074 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" event={"ID":"8593a4fc-619d-4fce-810a-252425f1629c","Type":"ContainerDied","Data":"0bdb83a4642063384966ad1d121dc4b6e9774726e690595acfdb60a20f821dad"} Feb 02 17:30:03 crc kubenswrapper[4835]: I0202 17:30:03.392114 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr" Feb 02 17:30:03 crc kubenswrapper[4835]: I0202 17:30:03.392124 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bdb83a4642063384966ad1d121dc4b6e9774726e690595acfdb60a20f821dad" Feb 02 17:30:03 crc kubenswrapper[4835]: I0202 17:30:03.848835 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788"] Feb 02 17:30:03 crc kubenswrapper[4835]: I0202 17:30:03.857324 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500845-cs788"] Feb 02 17:30:05 crc kubenswrapper[4835]: I0202 17:30:05.204078 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d67474c-95af-464f-b92a-4f2bc00dd1fd" path="/var/lib/kubelet/pods/0d67474c-95af-464f-b92a-4f2bc00dd1fd/volumes" Feb 02 17:30:13 crc kubenswrapper[4835]: I0202 17:30:13.636937 4835 scope.go:117] "RemoveContainer" containerID="5219781c385b6b91b578780f7c113d67d24c583c1db6ec0d1dfa658369805081" Feb 02 17:30:14 crc kubenswrapper[4835]: I0202 17:30:14.870653 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:30:14 crc kubenswrapper[4835]: I0202 17:30:14.870915 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:30:14 crc kubenswrapper[4835]: I0202 17:30:14.870963 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:30:14 crc kubenswrapper[4835]: I0202 17:30:14.871634 4835 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:30:14 crc kubenswrapper[4835]: I0202 17:30:14.871688 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" gracePeriod=600 Feb 02 17:30:14 crc kubenswrapper[4835]: E0202 17:30:14.993234 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:30:15 crc kubenswrapper[4835]: I0202 17:30:15.492744 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" exitCode=0 Feb 02 17:30:15 crc kubenswrapper[4835]: I0202 17:30:15.492843 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b"} Feb 02 17:30:15 crc kubenswrapper[4835]: I0202 17:30:15.493086 4835 scope.go:117] "RemoveContainer" containerID="7b9b67ec0a70d873df2f1e945ad068f814ea600a71d949977865039aa50f1fc5" Feb 02 17:30:15 crc kubenswrapper[4835]: I0202 17:30:15.493684 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:30:15 crc kubenswrapper[4835]: E0202 17:30:15.493964 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:30:27 crc kubenswrapper[4835]: I0202 17:30:27.188949 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:30:27 crc kubenswrapper[4835]: E0202 17:30:27.189775 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:30:40 crc kubenswrapper[4835]: I0202 17:30:40.188642 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 
17:30:40 crc kubenswrapper[4835]: E0202 17:30:40.189711 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:30:48 crc kubenswrapper[4835]: I0202 17:30:48.793604 4835 generic.go:334] "Generic (PLEG): container finished" podID="cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" containerID="d187f3f996c067c7ade91a204550c88114258af0aed274c01ac0c6c35108a0ce" exitCode=0 Feb 02 17:30:48 crc kubenswrapper[4835]: I0202 17:30:48.793676 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" event={"ID":"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5","Type":"ContainerDied","Data":"d187f3f996c067c7ade91a204550c88114258af0aed274c01ac0c6c35108a0ce"} Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.256380 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.388102 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-metadata-combined-ca-bundle\") pod \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.388165 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ssh-key-openstack-edpm-ipam\") pod \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.388188 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ceph\") pod \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.388211 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x9sm\" (UniqueName: \"kubernetes.io/projected/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-kube-api-access-7x9sm\") pod \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.388239 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-ovn-metadata-agent-neutron-config-0\") pod \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.388291 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-nova-metadata-neutron-config-0\") pod \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\" (UID: 
\"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.388328 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-inventory\") pod \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\" (UID: \"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5\") " Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.394416 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" (UID: "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.395264 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-kube-api-access-7x9sm" (OuterVolumeSpecName: "kube-api-access-7x9sm") pod "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" (UID: "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5"). InnerVolumeSpecName "kube-api-access-7x9sm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.395991 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ceph" (OuterVolumeSpecName: "ceph") pod "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" (UID: "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.415067 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-inventory" (OuterVolumeSpecName: "inventory") pod "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" (UID: "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.415945 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" (UID: "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.416432 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" (UID: "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.432509 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" (UID: "cb4a4f3f-7bb3-498f-b54c-bf0471877ff5"). 
InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.490391 4835 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.490429 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.490443 4835 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.490457 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.490469 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.490480 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x9sm\" (UniqueName: \"kubernetes.io/projected/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-kube-api-access-7x9sm\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.490493 4835 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/cb4a4f3f-7bb3-498f-b54c-bf0471877ff5-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.810854 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" event={"ID":"cb4a4f3f-7bb3-498f-b54c-bf0471877ff5","Type":"ContainerDied","Data":"8782b7076a50cba8768033125176673d714c22997119301d4ed7a7355d3a34a4"} Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.810895 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8782b7076a50cba8768033125176673d714c22997119301d4ed7a7355d3a34a4" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.810943 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.909541 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7"] Feb 02 17:30:50 crc kubenswrapper[4835]: E0202 17:30:50.909926 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8593a4fc-619d-4fce-810a-252425f1629c" containerName="collect-profiles" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.909945 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8593a4fc-619d-4fce-810a-252425f1629c" containerName="collect-profiles" Feb 02 17:30:50 crc kubenswrapper[4835]: E0202 17:30:50.909985 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.909995 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.910206 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb4a4f3f-7bb3-498f-b54c-bf0471877ff5" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.910243 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8593a4fc-619d-4fce-810a-252425f1629c" containerName="collect-profiles" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.910914 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.914196 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.924295 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7"] Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.927117 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.928532 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.928696 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.928786 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.929150 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.999353 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.999409 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.999439 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.999474 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.999541 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:50 crc kubenswrapper[4835]: I0202 17:30:50.999647 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28gbz\" (UniqueName: \"kubernetes.io/projected/8ebc7011-6fd1-437b-90dc-38f23dc004f5-kube-api-access-28gbz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.101153 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28gbz\" (UniqueName: \"kubernetes.io/projected/8ebc7011-6fd1-437b-90dc-38f23dc004f5-kube-api-access-28gbz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.101211 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.101247 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.101292 4835 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.101321 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.101375 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.105410 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.105479 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.106166 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.106170 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.107137 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.118504 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-28gbz\" (UniqueName: \"kubernetes.io/projected/8ebc7011-6fd1-437b-90dc-38f23dc004f5-kube-api-access-28gbz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.236812 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:30:51 crc kubenswrapper[4835]: W0202 17:30:51.767232 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ebc7011_6fd1_437b_90dc_38f23dc004f5.slice/crio-4781952bd7a948ba3ec6cb6df6b7d6a26734e9810fd9a8cac15819688b8d39fe WatchSource:0}: Error finding container 4781952bd7a948ba3ec6cb6df6b7d6a26734e9810fd9a8cac15819688b8d39fe: Status 404 returned error can't find the container with id 4781952bd7a948ba3ec6cb6df6b7d6a26734e9810fd9a8cac15819688b8d39fe Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.767929 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7"] Feb 02 17:30:51 crc kubenswrapper[4835]: I0202 17:30:51.821094 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" event={"ID":"8ebc7011-6fd1-437b-90dc-38f23dc004f5","Type":"ContainerStarted","Data":"4781952bd7a948ba3ec6cb6df6b7d6a26734e9810fd9a8cac15819688b8d39fe"} Feb 02 17:30:52 crc kubenswrapper[4835]: I0202 17:30:52.829716 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" event={"ID":"8ebc7011-6fd1-437b-90dc-38f23dc004f5","Type":"ContainerStarted","Data":"33b3a58a2b455c10cb11b088605ff9cc5d0f48706b5be0d518092c36337a4ee9"} Feb 02 17:30:52 crc kubenswrapper[4835]: I0202 17:30:52.852761 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" podStartSLOduration=2.40378691 podStartE2EDuration="2.852744918s" podCreationTimestamp="2026-02-02 17:30:50 +0000 UTC" firstStartedPulling="2026-02-02 17:30:51.770376559 +0000 UTC m=+2443.391980679" lastFinishedPulling="2026-02-02 17:30:52.219334607 +0000 UTC m=+2443.840938687" observedRunningTime="2026-02-02 17:30:52.848791686 +0000 UTC m=+2444.470395766" watchObservedRunningTime="2026-02-02 17:30:52.852744918 +0000 UTC m=+2444.474348998" Feb 02 17:30:53 crc kubenswrapper[4835]: I0202 17:30:53.189499 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:30:53 crc kubenswrapper[4835]: E0202 17:30:53.189769 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:31:05 crc kubenswrapper[4835]: I0202 17:31:05.189604 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:31:05 crc kubenswrapper[4835]: E0202 17:31:05.190532 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:31:17 crc kubenswrapper[4835]: I0202 17:31:17.189068 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:31:17 crc kubenswrapper[4835]: E0202 17:31:17.190416 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:31:29 crc kubenswrapper[4835]: I0202 17:31:29.579743 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:31:29 crc kubenswrapper[4835]: E0202 17:31:29.582014 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:31:44 crc kubenswrapper[4835]: I0202 17:31:44.189667 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:31:44 crc kubenswrapper[4835]: E0202 17:31:44.190681 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:31:58 crc kubenswrapper[4835]: I0202 17:31:58.188655 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:31:58 crc kubenswrapper[4835]: E0202 17:31:58.189355 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:32:11 crc kubenswrapper[4835]: I0202 17:32:11.189967 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:32:11 crc kubenswrapper[4835]: E0202 17:32:11.190794 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:32:26 crc kubenswrapper[4835]: I0202 17:32:26.189509 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:32:26 crc kubenswrapper[4835]: E0202 17:32:26.190675 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:32:38 crc kubenswrapper[4835]: I0202 17:32:38.189009 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:32:38 crc kubenswrapper[4835]: E0202 17:32:38.190746 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:32:53 crc kubenswrapper[4835]: I0202 17:32:53.190584 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:32:53 crc kubenswrapper[4835]: E0202 17:32:53.191909 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.436402 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dphrn"] Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.439076 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.492319 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dphrn"] Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.619576 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-catalog-content\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.619667 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggg94\" (UniqueName: \"kubernetes.io/projected/d3846511-3420-420e-8b46-c27d9b910738-kube-api-access-ggg94\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.619718 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-utilities\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.721385 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-catalog-content\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.721447 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggg94\" (UniqueName: \"kubernetes.io/projected/d3846511-3420-420e-8b46-c27d9b910738-kube-api-access-ggg94\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.721496 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-utilities\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.721899 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-catalog-content\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.721905 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-utilities\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.743418 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ggg94\" (UniqueName: \"kubernetes.io/projected/d3846511-3420-420e-8b46-c27d9b910738-kube-api-access-ggg94\") pod \"redhat-operators-dphrn\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:02 crc kubenswrapper[4835]: I0202 17:33:02.809737 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:03 crc kubenswrapper[4835]: I0202 17:33:03.284409 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dphrn"] Feb 02 17:33:04 crc kubenswrapper[4835]: I0202 17:33:04.089078 4835 generic.go:334] "Generic (PLEG): container finished" podID="d3846511-3420-420e-8b46-c27d9b910738" containerID="2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82" exitCode=0 Feb 02 17:33:04 crc kubenswrapper[4835]: I0202 17:33:04.089162 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dphrn" event={"ID":"d3846511-3420-420e-8b46-c27d9b910738","Type":"ContainerDied","Data":"2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82"} Feb 02 17:33:04 crc kubenswrapper[4835]: I0202 17:33:04.089210 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dphrn" event={"ID":"d3846511-3420-420e-8b46-c27d9b910738","Type":"ContainerStarted","Data":"f39d00d3ad9f124b6d0c27743435971921f8aeef0316d5c9ec6ae2895580dbbd"} Feb 02 17:33:05 crc kubenswrapper[4835]: I0202 17:33:05.098533 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dphrn" event={"ID":"d3846511-3420-420e-8b46-c27d9b910738","Type":"ContainerStarted","Data":"a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137"} Feb 02 17:33:07 crc kubenswrapper[4835]: I0202 17:33:07.116333 4835 generic.go:334] "Generic (PLEG): container finished" podID="d3846511-3420-420e-8b46-c27d9b910738" containerID="a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137" exitCode=0 Feb 02 17:33:07 crc kubenswrapper[4835]: I0202 17:33:07.116431 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dphrn" event={"ID":"d3846511-3420-420e-8b46-c27d9b910738","Type":"ContainerDied","Data":"a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137"} Feb 02 17:33:07 crc kubenswrapper[4835]: I0202 17:33:07.216117 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:33:07 crc kubenswrapper[4835]: E0202 17:33:07.216355 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:33:09 crc kubenswrapper[4835]: I0202 17:33:09.138838 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dphrn" event={"ID":"d3846511-3420-420e-8b46-c27d9b910738","Type":"ContainerStarted","Data":"9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014"} Feb 02 17:33:09 crc kubenswrapper[4835]: I0202 17:33:09.160013 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-dphrn" podStartSLOduration=3.175841521 podStartE2EDuration="7.159983243s" podCreationTimestamp="2026-02-02 17:33:02 +0000 UTC" firstStartedPulling="2026-02-02 17:33:04.094213392 +0000 UTC m=+2575.715817482" lastFinishedPulling="2026-02-02 17:33:08.078355084 +0000 UTC m=+2579.699959204" observedRunningTime="2026-02-02 17:33:09.159475199 +0000 UTC m=+2580.781079299" watchObservedRunningTime="2026-02-02 17:33:09.159983243 +0000 UTC m=+2580.781587363" Feb 02 17:33:12 crc kubenswrapper[4835]: I0202 17:33:12.810461 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:12 crc kubenswrapper[4835]: I0202 17:33:12.811245 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:13 crc kubenswrapper[4835]: I0202 17:33:13.873750 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dphrn" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="registry-server" probeResult="failure" output=< Feb 02 17:33:13 crc kubenswrapper[4835]: timeout: failed to connect service ":50051" within 1s Feb 02 17:33:13 crc kubenswrapper[4835]: > Feb 02 17:33:21 crc kubenswrapper[4835]: I0202 17:33:21.188566 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:33:21 crc kubenswrapper[4835]: E0202 17:33:21.189301 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:33:22 crc kubenswrapper[4835]: I0202 17:33:22.872172 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:22 crc kubenswrapper[4835]: I0202 17:33:22.946979 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:23 crc kubenswrapper[4835]: I0202 17:33:23.124994 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dphrn"] Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.264653 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dphrn" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="registry-server" containerID="cri-o://9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014" gracePeriod=2 Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.647538 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.758547 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-utilities\") pod \"d3846511-3420-420e-8b46-c27d9b910738\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.758653 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggg94\" (UniqueName: \"kubernetes.io/projected/d3846511-3420-420e-8b46-c27d9b910738-kube-api-access-ggg94\") pod \"d3846511-3420-420e-8b46-c27d9b910738\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.758749 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-catalog-content\") pod \"d3846511-3420-420e-8b46-c27d9b910738\" (UID: \"d3846511-3420-420e-8b46-c27d9b910738\") " Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.759519 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-utilities" (OuterVolumeSpecName: "utilities") pod "d3846511-3420-420e-8b46-c27d9b910738" (UID: "d3846511-3420-420e-8b46-c27d9b910738"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.766396 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3846511-3420-420e-8b46-c27d9b910738-kube-api-access-ggg94" (OuterVolumeSpecName: "kube-api-access-ggg94") pod "d3846511-3420-420e-8b46-c27d9b910738" (UID: "d3846511-3420-420e-8b46-c27d9b910738"). InnerVolumeSpecName "kube-api-access-ggg94". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.861660 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.861716 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggg94\" (UniqueName: \"kubernetes.io/projected/d3846511-3420-420e-8b46-c27d9b910738-kube-api-access-ggg94\") on node \"crc\" DevicePath \"\"" Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.886343 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d3846511-3420-420e-8b46-c27d9b910738" (UID: "d3846511-3420-420e-8b46-c27d9b910738"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:33:24 crc kubenswrapper[4835]: I0202 17:33:24.963511 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3846511-3420-420e-8b46-c27d9b910738-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.285166 4835 generic.go:334] "Generic (PLEG): container finished" podID="d3846511-3420-420e-8b46-c27d9b910738" containerID="9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014" exitCode=0 Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.285213 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dphrn" event={"ID":"d3846511-3420-420e-8b46-c27d9b910738","Type":"ContainerDied","Data":"9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014"} Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.285245 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dphrn" event={"ID":"d3846511-3420-420e-8b46-c27d9b910738","Type":"ContainerDied","Data":"f39d00d3ad9f124b6d0c27743435971921f8aeef0316d5c9ec6ae2895580dbbd"} Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.285268 4835 scope.go:117] "RemoveContainer" containerID="9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.285462 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dphrn" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.354170 4835 scope.go:117] "RemoveContainer" containerID="a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.356065 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dphrn"] Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.364041 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dphrn"] Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.381544 4835 scope.go:117] "RemoveContainer" containerID="2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.418251 4835 scope.go:117] "RemoveContainer" containerID="9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014" Feb 02 17:33:25 crc kubenswrapper[4835]: E0202 17:33:25.418791 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014\": container with ID starting with 9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014 not found: ID does not exist" containerID="9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.418828 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014"} err="failed to get container status \"9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014\": rpc error: code = NotFound desc = could not find container \"9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014\": container with ID starting with 9220492b47427333eba2316989b6a2e88c4f492545354aefd9e9dec40f7b4014 not found: ID does not exist" Feb 02 17:33:25 crc 
kubenswrapper[4835]: I0202 17:33:25.418857 4835 scope.go:117] "RemoveContainer" containerID="a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137" Feb 02 17:33:25 crc kubenswrapper[4835]: E0202 17:33:25.419310 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137\": container with ID starting with a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137 not found: ID does not exist" containerID="a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.419377 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137"} err="failed to get container status \"a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137\": rpc error: code = NotFound desc = could not find container \"a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137\": container with ID starting with a388f6ba5612cf33e31e991242fd1ee3c513610c7a22d07591a474fd7ac7f137 not found: ID does not exist" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.419416 4835 scope.go:117] "RemoveContainer" containerID="2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82" Feb 02 17:33:25 crc kubenswrapper[4835]: E0202 17:33:25.419792 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82\": container with ID starting with 2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82 not found: ID does not exist" containerID="2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82" Feb 02 17:33:25 crc kubenswrapper[4835]: I0202 17:33:25.419833 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82"} err="failed to get container status \"2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82\": rpc error: code = NotFound desc = could not find container \"2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82\": container with ID starting with 2876061713a063f0973044705e1e5b7756a5f4e15bfbfad6be84b92ab9d96a82 not found: ID does not exist" Feb 02 17:33:27 crc kubenswrapper[4835]: I0202 17:33:27.203667 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3846511-3420-420e-8b46-c27d9b910738" path="/var/lib/kubelet/pods/d3846511-3420-420e-8b46-c27d9b910738/volumes" Feb 02 17:33:33 crc kubenswrapper[4835]: I0202 17:33:33.188672 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:33:33 crc kubenswrapper[4835]: E0202 17:33:33.189426 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:33:45 crc kubenswrapper[4835]: I0202 17:33:45.188493 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" 
Feb 02 17:33:45 crc kubenswrapper[4835]: E0202 17:33:45.189226 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:33:57 crc kubenswrapper[4835]: I0202 17:33:57.189415 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:33:57 crc kubenswrapper[4835]: E0202 17:33:57.190147 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:34:11 crc kubenswrapper[4835]: I0202 17:34:11.190041 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:34:11 crc kubenswrapper[4835]: E0202 17:34:11.191159 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:34:22 crc kubenswrapper[4835]: I0202 17:34:22.188725 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:34:22 crc kubenswrapper[4835]: E0202 17:34:22.189832 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:34:37 crc kubenswrapper[4835]: I0202 17:34:37.188824 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:34:37 crc kubenswrapper[4835]: E0202 17:34:37.190961 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:34:52 crc kubenswrapper[4835]: I0202 17:34:52.189653 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:34:52 crc kubenswrapper[4835]: E0202 17:34:52.190544 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:34:57 crc kubenswrapper[4835]: I0202 17:34:57.125368 4835 generic.go:334] "Generic (PLEG): container finished" podID="8ebc7011-6fd1-437b-90dc-38f23dc004f5" containerID="33b3a58a2b455c10cb11b088605ff9cc5d0f48706b5be0d518092c36337a4ee9" exitCode=0 Feb 02 17:34:57 crc kubenswrapper[4835]: I0202 17:34:57.125442 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" event={"ID":"8ebc7011-6fd1-437b-90dc-38f23dc004f5","Type":"ContainerDied","Data":"33b3a58a2b455c10cb11b088605ff9cc5d0f48706b5be0d518092c36337a4ee9"} Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.544766 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.682767 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ssh-key-openstack-edpm-ipam\") pod \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.682850 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-inventory\") pod \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.682988 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28gbz\" (UniqueName: \"kubernetes.io/projected/8ebc7011-6fd1-437b-90dc-38f23dc004f5-kube-api-access-28gbz\") pod \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.683040 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-combined-ca-bundle\") pod \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.683059 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ceph\") pod \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.683100 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-secret-0\") pod \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\" (UID: \"8ebc7011-6fd1-437b-90dc-38f23dc004f5\") " Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.691896 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod 
"8ebc7011-6fd1-437b-90dc-38f23dc004f5" (UID: "8ebc7011-6fd1-437b-90dc-38f23dc004f5"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.693428 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ceph" (OuterVolumeSpecName: "ceph") pod "8ebc7011-6fd1-437b-90dc-38f23dc004f5" (UID: "8ebc7011-6fd1-437b-90dc-38f23dc004f5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.693833 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ebc7011-6fd1-437b-90dc-38f23dc004f5-kube-api-access-28gbz" (OuterVolumeSpecName: "kube-api-access-28gbz") pod "8ebc7011-6fd1-437b-90dc-38f23dc004f5" (UID: "8ebc7011-6fd1-437b-90dc-38f23dc004f5"). InnerVolumeSpecName "kube-api-access-28gbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.713841 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "8ebc7011-6fd1-437b-90dc-38f23dc004f5" (UID: "8ebc7011-6fd1-437b-90dc-38f23dc004f5"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.735336 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-inventory" (OuterVolumeSpecName: "inventory") pod "8ebc7011-6fd1-437b-90dc-38f23dc004f5" (UID: "8ebc7011-6fd1-437b-90dc-38f23dc004f5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.749104 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "8ebc7011-6fd1-437b-90dc-38f23dc004f5" (UID: "8ebc7011-6fd1-437b-90dc-38f23dc004f5"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.793413 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.793450 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28gbz\" (UniqueName: \"kubernetes.io/projected/8ebc7011-6fd1-437b-90dc-38f23dc004f5-kube-api-access-28gbz\") on node \"crc\" DevicePath \"\"" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.793468 4835 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.793479 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.793490 4835 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:34:58 crc kubenswrapper[4835]: I0202 17:34:58.793501 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8ebc7011-6fd1-437b-90dc-38f23dc004f5-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.146673 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" event={"ID":"8ebc7011-6fd1-437b-90dc-38f23dc004f5","Type":"ContainerDied","Data":"4781952bd7a948ba3ec6cb6df6b7d6a26734e9810fd9a8cac15819688b8d39fe"} Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.146918 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4781952bd7a948ba3ec6cb6df6b7d6a26734e9810fd9a8cac15819688b8d39fe" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.147067 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.264250 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7"] Feb 02 17:34:59 crc kubenswrapper[4835]: E0202 17:34:59.264668 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="extract-content" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.264690 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="extract-content" Feb 02 17:34:59 crc kubenswrapper[4835]: E0202 17:34:59.264719 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="extract-utilities" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.264727 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="extract-utilities" Feb 02 17:34:59 crc kubenswrapper[4835]: E0202 17:34:59.264752 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ebc7011-6fd1-437b-90dc-38f23dc004f5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.264761 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ebc7011-6fd1-437b-90dc-38f23dc004f5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 02 17:34:59 crc kubenswrapper[4835]: E0202 17:34:59.264777 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="registry-server" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.264785 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="registry-server" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.265034 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ebc7011-6fd1-437b-90dc-38f23dc004f5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.265058 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3846511-3420-420e-8b46-c27d9b910738" containerName="registry-server" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.265972 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.270508 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q9zm8" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.270750 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.270514 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.271060 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.270622 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.271429 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.271759 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.272037 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.272251 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.289740 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7"] Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423339 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423399 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ssh-key-openstack-edpm-ipam\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423422 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423444 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423482 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423505 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423548 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr2zw\" (UniqueName: \"kubernetes.io/projected/224a86ad-9920-4e35-8470-e48d3af63934-kube-api-access-rr2zw\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423581 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423610 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423656 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.423690 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " 
pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.525794 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.526266 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.526666 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ssh-key-openstack-edpm-ipam\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.526936 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.527232 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.527636 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.527966 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.528270 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr2zw\" (UniqueName: 
\"kubernetes.io/projected/224a86ad-9920-4e35-8470-e48d3af63934-kube-api-access-rr2zw\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.528742 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.529044 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.529431 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.529061 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.531417 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.532685 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.534105 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.535696 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.536706 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.536747 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.540540 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.540567 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ssh-key-openstack-edpm-ipam\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.544745 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr2zw\" (UniqueName: \"kubernetes.io/projected/224a86ad-9920-4e35-8470-e48d3af63934-kube-api-access-rr2zw\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.545816 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:34:59 crc kubenswrapper[4835]: I0202 17:34:59.629063 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:35:00 crc kubenswrapper[4835]: I0202 17:35:00.196399 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7"] Feb 02 17:35:00 crc kubenswrapper[4835]: W0202 17:35:00.197915 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod224a86ad_9920_4e35_8470_e48d3af63934.slice/crio-5a7b5b15af4b1f404bd22c584de0011173e5d1aa72d5102993f6301625542c28 WatchSource:0}: Error finding container 5a7b5b15af4b1f404bd22c584de0011173e5d1aa72d5102993f6301625542c28: Status 404 returned error can't find the container with id 5a7b5b15af4b1f404bd22c584de0011173e5d1aa72d5102993f6301625542c28 Feb 02 17:35:00 crc kubenswrapper[4835]: I0202 17:35:00.201314 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:35:01 crc kubenswrapper[4835]: I0202 17:35:01.162039 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" event={"ID":"224a86ad-9920-4e35-8470-e48d3af63934","Type":"ContainerStarted","Data":"5a7b5b15af4b1f404bd22c584de0011173e5d1aa72d5102993f6301625542c28"} Feb 02 17:35:02 crc kubenswrapper[4835]: I0202 17:35:02.172464 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" event={"ID":"224a86ad-9920-4e35-8470-e48d3af63934","Type":"ContainerStarted","Data":"c1543e83a01118e564ba40d559adbc8cb6d9cbb44c8144930e3c41d54a4baf5c"} Feb 02 17:35:02 crc kubenswrapper[4835]: I0202 17:35:02.197672 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" podStartSLOduration=2.465759458 podStartE2EDuration="3.197647914s" podCreationTimestamp="2026-02-02 17:34:59 +0000 UTC" firstStartedPulling="2026-02-02 17:35:00.200992252 +0000 UTC m=+2691.822596332" lastFinishedPulling="2026-02-02 17:35:00.932880708 +0000 UTC m=+2692.554484788" observedRunningTime="2026-02-02 17:35:02.19469967 +0000 UTC m=+2693.816303780" watchObservedRunningTime="2026-02-02 17:35:02.197647914 +0000 UTC m=+2693.819252014" Feb 02 17:35:05 crc kubenswrapper[4835]: I0202 17:35:05.189555 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:35:05 crc kubenswrapper[4835]: E0202 17:35:05.190166 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.562475 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hvn5n"] Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.564324 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.595897 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hvn5n"] Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.687464 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-catalog-content\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.687598 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-utilities\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.687756 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs4dl\" (UniqueName: \"kubernetes.io/projected/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-kube-api-access-zs4dl\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.758096 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kbf6q"] Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.759949 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.782850 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kbf6q"] Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.789880 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-catalog-content\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.790063 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zjzp\" (UniqueName: \"kubernetes.io/projected/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-kube-api-access-4zjzp\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.790121 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-utilities\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.790156 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-utilities\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.790247 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-catalog-content\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.790351 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs4dl\" (UniqueName: \"kubernetes.io/projected/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-kube-api-access-zs4dl\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.790654 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-catalog-content\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.790678 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-utilities\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.827344 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zs4dl\" (UniqueName: \"kubernetes.io/projected/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-kube-api-access-zs4dl\") pod \"certified-operators-hvn5n\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.891591 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-catalog-content\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.891728 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zjzp\" (UniqueName: \"kubernetes.io/projected/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-kube-api-access-4zjzp\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.891758 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-utilities\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.892412 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-catalog-content\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.892449 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-utilities\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.900229 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:07 crc kubenswrapper[4835]: I0202 17:35:07.910709 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zjzp\" (UniqueName: \"kubernetes.io/projected/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-kube-api-access-4zjzp\") pod \"community-operators-kbf6q\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:08 crc kubenswrapper[4835]: I0202 17:35:08.081741 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:08 crc kubenswrapper[4835]: I0202 17:35:08.504968 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hvn5n"] Feb 02 17:35:08 crc kubenswrapper[4835]: I0202 17:35:08.732759 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kbf6q"] Feb 02 17:35:08 crc kubenswrapper[4835]: W0202 17:35:08.768986 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98a36f40_50eb_4c5e_8b4e_7b15fef1fa0f.slice/crio-477c6832bcbfdaa09c8fb4f322e2b40e784bfff5c0bb297eead7a769e57635ba WatchSource:0}: Error finding container 477c6832bcbfdaa09c8fb4f322e2b40e784bfff5c0bb297eead7a769e57635ba: Status 404 returned error can't find the container with id 477c6832bcbfdaa09c8fb4f322e2b40e784bfff5c0bb297eead7a769e57635ba Feb 02 17:35:09 crc kubenswrapper[4835]: I0202 17:35:09.243421 4835 generic.go:334] "Generic (PLEG): container finished" podID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerID="6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676" exitCode=0 Feb 02 17:35:09 crc kubenswrapper[4835]: I0202 17:35:09.244969 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvn5n" event={"ID":"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0","Type":"ContainerDied","Data":"6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676"} Feb 02 17:35:09 crc kubenswrapper[4835]: I0202 17:35:09.245245 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvn5n" event={"ID":"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0","Type":"ContainerStarted","Data":"20204b4a69fcf01fe59ed7905f93e242a6fe799d175f6912ee48e6fd144dac5c"} Feb 02 17:35:09 crc kubenswrapper[4835]: I0202 17:35:09.249137 4835 generic.go:334] "Generic (PLEG): container finished" podID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerID="9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a" exitCode=0 Feb 02 17:35:09 crc kubenswrapper[4835]: I0202 17:35:09.249184 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbf6q" event={"ID":"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f","Type":"ContainerDied","Data":"9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a"} Feb 02 17:35:09 crc kubenswrapper[4835]: I0202 17:35:09.249210 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbf6q" event={"ID":"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f","Type":"ContainerStarted","Data":"477c6832bcbfdaa09c8fb4f322e2b40e784bfff5c0bb297eead7a769e57635ba"} Feb 02 17:35:10 crc kubenswrapper[4835]: I0202 17:35:10.260762 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbf6q" event={"ID":"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f","Type":"ContainerStarted","Data":"1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f"} Feb 02 17:35:11 crc kubenswrapper[4835]: I0202 17:35:11.273383 4835 generic.go:334] "Generic (PLEG): container finished" podID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerID="7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d" exitCode=0 Feb 02 17:35:11 crc kubenswrapper[4835]: I0202 17:35:11.273444 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvn5n" 
event={"ID":"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0","Type":"ContainerDied","Data":"7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d"} Feb 02 17:35:11 crc kubenswrapper[4835]: I0202 17:35:11.276478 4835 generic.go:334] "Generic (PLEG): container finished" podID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerID="1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f" exitCode=0 Feb 02 17:35:11 crc kubenswrapper[4835]: I0202 17:35:11.276573 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbf6q" event={"ID":"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f","Type":"ContainerDied","Data":"1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f"} Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.287637 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvn5n" event={"ID":"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0","Type":"ContainerStarted","Data":"f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a"} Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.290501 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbf6q" event={"ID":"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f","Type":"ContainerStarted","Data":"060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02"} Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.312741 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hvn5n" podStartSLOduration=2.878104523 podStartE2EDuration="5.312723111s" podCreationTimestamp="2026-02-02 17:35:07 +0000 UTC" firstStartedPulling="2026-02-02 17:35:09.245631788 +0000 UTC m=+2700.867235868" lastFinishedPulling="2026-02-02 17:35:11.680250376 +0000 UTC m=+2703.301854456" observedRunningTime="2026-02-02 17:35:12.309684035 +0000 UTC m=+2703.931288125" watchObservedRunningTime="2026-02-02 17:35:12.312723111 +0000 UTC m=+2703.934327191" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.331416 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kbf6q" podStartSLOduration=2.871162827 podStartE2EDuration="5.331397001s" podCreationTimestamp="2026-02-02 17:35:07 +0000 UTC" firstStartedPulling="2026-02-02 17:35:09.250503657 +0000 UTC m=+2700.872107737" lastFinishedPulling="2026-02-02 17:35:11.710737831 +0000 UTC m=+2703.332341911" observedRunningTime="2026-02-02 17:35:12.331148784 +0000 UTC m=+2703.952752864" watchObservedRunningTime="2026-02-02 17:35:12.331397001 +0000 UTC m=+2703.953001081" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.356990 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xpxsk"] Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.369633 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xpxsk"] Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.369732 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.541449 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-utilities\") pod \"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.541596 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-catalog-content\") pod \"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.541794 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4dxk\" (UniqueName: \"kubernetes.io/projected/72c721db-bdc9-49ec-9380-5336da467219-kube-api-access-x4dxk\") pod \"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.643438 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-utilities\") pod \"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.643520 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-catalog-content\") pod \"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.643574 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4dxk\" (UniqueName: \"kubernetes.io/projected/72c721db-bdc9-49ec-9380-5336da467219-kube-api-access-x4dxk\") pod \"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.643971 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-catalog-content\") pod \"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.644734 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-utilities\") pod \"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.661335 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4dxk\" (UniqueName: \"kubernetes.io/projected/72c721db-bdc9-49ec-9380-5336da467219-kube-api-access-x4dxk\") pod 
\"redhat-marketplace-xpxsk\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:12 crc kubenswrapper[4835]: I0202 17:35:12.688152 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:13 crc kubenswrapper[4835]: I0202 17:35:13.187372 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xpxsk"] Feb 02 17:35:13 crc kubenswrapper[4835]: W0202 17:35:13.214130 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72c721db_bdc9_49ec_9380_5336da467219.slice/crio-09e7f004d88f317d802c7c3fdcccf7f62880939b74c08e4ff34ed4e6460faf70 WatchSource:0}: Error finding container 09e7f004d88f317d802c7c3fdcccf7f62880939b74c08e4ff34ed4e6460faf70: Status 404 returned error can't find the container with id 09e7f004d88f317d802c7c3fdcccf7f62880939b74c08e4ff34ed4e6460faf70 Feb 02 17:35:13 crc kubenswrapper[4835]: I0202 17:35:13.301087 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xpxsk" event={"ID":"72c721db-bdc9-49ec-9380-5336da467219","Type":"ContainerStarted","Data":"09e7f004d88f317d802c7c3fdcccf7f62880939b74c08e4ff34ed4e6460faf70"} Feb 02 17:35:14 crc kubenswrapper[4835]: I0202 17:35:14.314257 4835 generic.go:334] "Generic (PLEG): container finished" podID="72c721db-bdc9-49ec-9380-5336da467219" containerID="d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14" exitCode=0 Feb 02 17:35:14 crc kubenswrapper[4835]: I0202 17:35:14.314369 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xpxsk" event={"ID":"72c721db-bdc9-49ec-9380-5336da467219","Type":"ContainerDied","Data":"d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14"} Feb 02 17:35:15 crc kubenswrapper[4835]: I0202 17:35:15.323063 4835 generic.go:334] "Generic (PLEG): container finished" podID="72c721db-bdc9-49ec-9380-5336da467219" containerID="4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a" exitCode=0 Feb 02 17:35:15 crc kubenswrapper[4835]: I0202 17:35:15.323162 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xpxsk" event={"ID":"72c721db-bdc9-49ec-9380-5336da467219","Type":"ContainerDied","Data":"4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a"} Feb 02 17:35:16 crc kubenswrapper[4835]: I0202 17:35:16.333301 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xpxsk" event={"ID":"72c721db-bdc9-49ec-9380-5336da467219","Type":"ContainerStarted","Data":"de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65"} Feb 02 17:35:16 crc kubenswrapper[4835]: I0202 17:35:16.358492 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xpxsk" podStartSLOduration=2.956671578 podStartE2EDuration="4.358472612s" podCreationTimestamp="2026-02-02 17:35:12 +0000 UTC" firstStartedPulling="2026-02-02 17:35:14.316193776 +0000 UTC m=+2705.937797876" lastFinishedPulling="2026-02-02 17:35:15.71799483 +0000 UTC m=+2707.339598910" observedRunningTime="2026-02-02 17:35:16.350368632 +0000 UTC m=+2707.971972782" watchObservedRunningTime="2026-02-02 17:35:16.358472612 +0000 UTC m=+2707.980076692" Feb 02 17:35:17 crc kubenswrapper[4835]: I0202 17:35:17.901414 4835 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:17 crc kubenswrapper[4835]: I0202 17:35:17.901686 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:17 crc kubenswrapper[4835]: I0202 17:35:17.945785 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:18 crc kubenswrapper[4835]: I0202 17:35:18.082458 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:18 crc kubenswrapper[4835]: I0202 17:35:18.082514 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:18 crc kubenswrapper[4835]: I0202 17:35:18.128065 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:18 crc kubenswrapper[4835]: I0202 17:35:18.392627 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:18 crc kubenswrapper[4835]: I0202 17:35:18.401546 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:19 crc kubenswrapper[4835]: I0202 17:35:19.195447 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:35:20 crc kubenswrapper[4835]: I0202 17:35:20.404542 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"8c0c1dd28e739785aa59f1b10ef0e393360cdd138b956085fece6a9d4036c389"} Feb 02 17:35:21 crc kubenswrapper[4835]: I0202 17:35:21.545013 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hvn5n"] Feb 02 17:35:21 crc kubenswrapper[4835]: I0202 17:35:21.545567 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hvn5n" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerName="registry-server" containerID="cri-o://f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a" gracePeriod=2 Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.034920 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.237887 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zs4dl\" (UniqueName: \"kubernetes.io/projected/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-kube-api-access-zs4dl\") pod \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.238400 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-utilities\") pod \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.238493 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-catalog-content\") pod \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\" (UID: \"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0\") " Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.239021 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-utilities" (OuterVolumeSpecName: "utilities") pod "5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" (UID: "5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.239684 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.245491 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-kube-api-access-zs4dl" (OuterVolumeSpecName: "kube-api-access-zs4dl") pod "5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" (UID: "5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0"). InnerVolumeSpecName "kube-api-access-zs4dl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.297085 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" (UID: "5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.341855 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.341895 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zs4dl\" (UniqueName: \"kubernetes.io/projected/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0-kube-api-access-zs4dl\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.422951 4835 generic.go:334] "Generic (PLEG): container finished" podID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerID="f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a" exitCode=0 Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.423004 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvn5n" event={"ID":"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0","Type":"ContainerDied","Data":"f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a"} Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.423039 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hvn5n" event={"ID":"5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0","Type":"ContainerDied","Data":"20204b4a69fcf01fe59ed7905f93e242a6fe799d175f6912ee48e6fd144dac5c"} Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.423065 4835 scope.go:117] "RemoveContainer" containerID="f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.423231 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hvn5n" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.482160 4835 scope.go:117] "RemoveContainer" containerID="7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.491380 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hvn5n"] Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.500629 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hvn5n"] Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.509382 4835 scope.go:117] "RemoveContainer" containerID="6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.550798 4835 scope.go:117] "RemoveContainer" containerID="f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a" Feb 02 17:35:22 crc kubenswrapper[4835]: E0202 17:35:22.551579 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a\": container with ID starting with f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a not found: ID does not exist" containerID="f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.551631 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a"} err="failed to get container status \"f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a\": rpc error: code = NotFound desc = could not find container \"f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a\": container with ID starting with f384b6f385a9a43b0cb5c27eccecf876936023578281d31807b34ca3806ee34a not found: ID does not exist" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.551664 4835 scope.go:117] "RemoveContainer" containerID="7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d" Feb 02 17:35:22 crc kubenswrapper[4835]: E0202 17:35:22.553155 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d\": container with ID starting with 7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d not found: ID does not exist" containerID="7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.553241 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d"} err="failed to get container status \"7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d\": rpc error: code = NotFound desc = could not find container \"7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d\": container with ID starting with 7fa8013a90cb6fae50ee277b2f211ecb695f41d384284cca9ccbb323869fe46d not found: ID does not exist" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.553374 4835 scope.go:117] "RemoveContainer" containerID="6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676" Feb 02 17:35:22 crc kubenswrapper[4835]: E0202 17:35:22.554207 4835 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676\": container with ID starting with 6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676 not found: ID does not exist" containerID="6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.554250 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676"} err="failed to get container status \"6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676\": rpc error: code = NotFound desc = could not find container \"6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676\": container with ID starting with 6fcd4c575203745d0633bf30f127cc5c9a0c7726aeeb212c65b4ba583dde3676 not found: ID does not exist" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.689104 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.689140 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:22 crc kubenswrapper[4835]: I0202 17:35:22.741939 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:23 crc kubenswrapper[4835]: I0202 17:35:23.201139 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" path="/var/lib/kubelet/pods/5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0/volumes" Feb 02 17:35:23 crc kubenswrapper[4835]: I0202 17:35:23.491624 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:24 crc kubenswrapper[4835]: I0202 17:35:24.360011 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kbf6q"] Feb 02 17:35:24 crc kubenswrapper[4835]: I0202 17:35:24.361487 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kbf6q" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerName="registry-server" containerID="cri-o://060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02" gracePeriod=2 Feb 02 17:35:24 crc kubenswrapper[4835]: I0202 17:35:24.815043 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.005672 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zjzp\" (UniqueName: \"kubernetes.io/projected/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-kube-api-access-4zjzp\") pod \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.005869 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-catalog-content\") pod \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.005915 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-utilities\") pod \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\" (UID: \"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f\") " Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.006987 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-utilities" (OuterVolumeSpecName: "utilities") pod "98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" (UID: "98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.027726 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-kube-api-access-4zjzp" (OuterVolumeSpecName: "kube-api-access-4zjzp") pod "98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" (UID: "98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f"). InnerVolumeSpecName "kube-api-access-4zjzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.109663 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.109695 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zjzp\" (UniqueName: \"kubernetes.io/projected/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-kube-api-access-4zjzp\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.137454 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" (UID: "98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.211792 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.452840 4835 generic.go:334] "Generic (PLEG): container finished" podID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerID="060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02" exitCode=0 Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.452886 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbf6q" event={"ID":"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f","Type":"ContainerDied","Data":"060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02"} Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.452930 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kbf6q" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.452966 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbf6q" event={"ID":"98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f","Type":"ContainerDied","Data":"477c6832bcbfdaa09c8fb4f322e2b40e784bfff5c0bb297eead7a769e57635ba"} Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.453008 4835 scope.go:117] "RemoveContainer" containerID="060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.474425 4835 scope.go:117] "RemoveContainer" containerID="1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.477508 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kbf6q"] Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.485503 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kbf6q"] Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.510984 4835 scope.go:117] "RemoveContainer" containerID="9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.539728 4835 scope.go:117] "RemoveContainer" containerID="060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02" Feb 02 17:35:25 crc kubenswrapper[4835]: E0202 17:35:25.544874 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02\": container with ID starting with 060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02 not found: ID does not exist" containerID="060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.544916 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02"} err="failed to get container status \"060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02\": rpc error: code = NotFound desc = could not find container \"060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02\": container with ID starting with 060ee37ab0f23b1d77030f2a9ac2546c97d6c23eb8c155f94ab24404c2f8ad02 not found: ID does not exist" Feb 02 
17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.544942 4835 scope.go:117] "RemoveContainer" containerID="1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f" Feb 02 17:35:25 crc kubenswrapper[4835]: E0202 17:35:25.545403 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f\": container with ID starting with 1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f not found: ID does not exist" containerID="1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.545448 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f"} err="failed to get container status \"1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f\": rpc error: code = NotFound desc = could not find container \"1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f\": container with ID starting with 1da6ac60257fa6422f7415b44096ca2d0625b1abe09962aeed4583c95906c91f not found: ID does not exist" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.545477 4835 scope.go:117] "RemoveContainer" containerID="9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a" Feb 02 17:35:25 crc kubenswrapper[4835]: E0202 17:35:25.545822 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a\": container with ID starting with 9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a not found: ID does not exist" containerID="9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a" Feb 02 17:35:25 crc kubenswrapper[4835]: I0202 17:35:25.545851 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a"} err="failed to get container status \"9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a\": rpc error: code = NotFound desc = could not find container \"9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a\": container with ID starting with 9155c7ddfbab0d2347e144d5ab6231fb1beec8f2befd9f318971d518f5aacb4a not found: ID does not exist" Feb 02 17:35:26 crc kubenswrapper[4835]: I0202 17:35:26.947906 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xpxsk"] Feb 02 17:35:26 crc kubenswrapper[4835]: I0202 17:35:26.948200 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xpxsk" podUID="72c721db-bdc9-49ec-9380-5336da467219" containerName="registry-server" containerID="cri-o://de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65" gracePeriod=2 Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.218044 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" path="/var/lib/kubelet/pods/98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f/volumes" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.400033 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.474261 4835 generic.go:334] "Generic (PLEG): container finished" podID="72c721db-bdc9-49ec-9380-5336da467219" containerID="de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65" exitCode=0 Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.474330 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xpxsk" event={"ID":"72c721db-bdc9-49ec-9380-5336da467219","Type":"ContainerDied","Data":"de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65"} Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.474342 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xpxsk" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.474366 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xpxsk" event={"ID":"72c721db-bdc9-49ec-9380-5336da467219","Type":"ContainerDied","Data":"09e7f004d88f317d802c7c3fdcccf7f62880939b74c08e4ff34ed4e6460faf70"} Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.474387 4835 scope.go:117] "RemoveContainer" containerID="de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.491234 4835 scope.go:117] "RemoveContainer" containerID="4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.507395 4835 scope.go:117] "RemoveContainer" containerID="d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.552350 4835 scope.go:117] "RemoveContainer" containerID="de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65" Feb 02 17:35:27 crc kubenswrapper[4835]: E0202 17:35:27.552707 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65\": container with ID starting with de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65 not found: ID does not exist" containerID="de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.552739 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65"} err="failed to get container status \"de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65\": rpc error: code = NotFound desc = could not find container \"de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65\": container with ID starting with de3d598d03c7bcbfa7e95fcebd7f3194c57dc70029b99d4b917176c47bb35f65 not found: ID does not exist" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.552768 4835 scope.go:117] "RemoveContainer" containerID="4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a" Feb 02 17:35:27 crc kubenswrapper[4835]: E0202 17:35:27.553050 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a\": container with ID starting with 4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a not found: ID does not exist" 
containerID="4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.553083 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a"} err="failed to get container status \"4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a\": rpc error: code = NotFound desc = could not find container \"4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a\": container with ID starting with 4fcb32b0d7755f1d0cb9de10e20ee7c29981fa85732a8f08348a7d2f63c79c1a not found: ID does not exist" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.553107 4835 scope.go:117] "RemoveContainer" containerID="d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.553694 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-catalog-content\") pod \"72c721db-bdc9-49ec-9380-5336da467219\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.553927 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-utilities\") pod \"72c721db-bdc9-49ec-9380-5336da467219\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.553993 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4dxk\" (UniqueName: \"kubernetes.io/projected/72c721db-bdc9-49ec-9380-5336da467219-kube-api-access-x4dxk\") pod \"72c721db-bdc9-49ec-9380-5336da467219\" (UID: \"72c721db-bdc9-49ec-9380-5336da467219\") " Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.554684 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-utilities" (OuterVolumeSpecName: "utilities") pod "72c721db-bdc9-49ec-9380-5336da467219" (UID: "72c721db-bdc9-49ec-9380-5336da467219"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:35:27 crc kubenswrapper[4835]: E0202 17:35:27.555795 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14\": container with ID starting with d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14 not found: ID does not exist" containerID="d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.555839 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14"} err="failed to get container status \"d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14\": rpc error: code = NotFound desc = could not find container \"d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14\": container with ID starting with d136306d4ba750cc04f7f0e10c49b23b67caadcdf85180c2ff3ca8cf2576fc14 not found: ID does not exist" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.560869 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72c721db-bdc9-49ec-9380-5336da467219-kube-api-access-x4dxk" (OuterVolumeSpecName: "kube-api-access-x4dxk") pod "72c721db-bdc9-49ec-9380-5336da467219" (UID: "72c721db-bdc9-49ec-9380-5336da467219"). InnerVolumeSpecName "kube-api-access-x4dxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.580877 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72c721db-bdc9-49ec-9380-5336da467219" (UID: "72c721db-bdc9-49ec-9380-5336da467219"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.658335 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.658372 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4dxk\" (UniqueName: \"kubernetes.io/projected/72c721db-bdc9-49ec-9380-5336da467219-kube-api-access-x4dxk\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.658386 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c721db-bdc9-49ec-9380-5336da467219-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.806405 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xpxsk"] Feb 02 17:35:27 crc kubenswrapper[4835]: I0202 17:35:27.816151 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xpxsk"] Feb 02 17:35:29 crc kubenswrapper[4835]: I0202 17:35:29.208192 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72c721db-bdc9-49ec-9380-5336da467219" path="/var/lib/kubelet/pods/72c721db-bdc9-49ec-9380-5336da467219/volumes" Feb 02 17:37:23 crc kubenswrapper[4835]: I0202 17:37:23.561794 4835 generic.go:334] "Generic (PLEG): container finished" podID="224a86ad-9920-4e35-8470-e48d3af63934" containerID="c1543e83a01118e564ba40d559adbc8cb6d9cbb44c8144930e3c41d54a4baf5c" exitCode=0 Feb 02 17:37:23 crc kubenswrapper[4835]: I0202 17:37:23.562238 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" event={"ID":"224a86ad-9920-4e35-8470-e48d3af63934","Type":"ContainerDied","Data":"c1543e83a01118e564ba40d559adbc8cb6d9cbb44c8144930e3c41d54a4baf5c"} Feb 02 17:37:24 crc kubenswrapper[4835]: I0202 17:37:24.989931 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102070 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ceph\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102207 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-ceph-nova-0\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102232 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-1\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102295 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-0\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102326 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-custom-ceph-combined-ca-bundle\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102375 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr2zw\" (UniqueName: \"kubernetes.io/projected/224a86ad-9920-4e35-8470-e48d3af63934-kube-api-access-rr2zw\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102440 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-inventory\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102495 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ssh-key-openstack-edpm-ipam\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102516 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-nova-extra-config-0\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102609 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-0\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.102637 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-1\") pod \"224a86ad-9920-4e35-8470-e48d3af63934\" (UID: \"224a86ad-9920-4e35-8470-e48d3af63934\") " Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.119651 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ceph" (OuterVolumeSpecName: "ceph") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.125882 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.125891 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/224a86ad-9920-4e35-8470-e48d3af63934-kube-api-access-rr2zw" (OuterVolumeSpecName: "kube-api-access-rr2zw") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "kube-api-access-rr2zw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.136113 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-inventory" (OuterVolumeSpecName: "inventory") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.138049 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.138360 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.141990 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.144068 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.145483 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.145762 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.150931 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "224a86ad-9920-4e35-8470-e48d3af63934" (UID: "224a86ad-9920-4e35-8470-e48d3af63934"). InnerVolumeSpecName "nova-extra-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205391 4835 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205431 4835 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205446 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205459 4835 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205472 4835 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205483 4835 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205499 4835 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205513 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr2zw\" (UniqueName: \"kubernetes.io/projected/224a86ad-9920-4e35-8470-e48d3af63934-kube-api-access-rr2zw\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205526 4835 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205537 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/224a86ad-9920-4e35-8470-e48d3af63934-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.205548 4835 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/224a86ad-9920-4e35-8470-e48d3af63934-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.583349 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" event={"ID":"224a86ad-9920-4e35-8470-e48d3af63934","Type":"ContainerDied","Data":"5a7b5b15af4b1f404bd22c584de0011173e5d1aa72d5102993f6301625542c28"} Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.583425 4835 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a7b5b15af4b1f404bd22c584de0011173e5d1aa72d5102993f6301625542c28" Feb 02 17:37:25 crc kubenswrapper[4835]: I0202 17:37:25.583453 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.916366 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917192 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="224a86ad-9920-4e35-8470-e48d3af63934" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917208 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="224a86ad-9920-4e35-8470-e48d3af63934" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917220 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerName="extract-utilities" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917227 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerName="extract-utilities" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917242 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c721db-bdc9-49ec-9380-5336da467219" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917248 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c721db-bdc9-49ec-9380-5336da467219" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917258 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerName="extract-content" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917264 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerName="extract-content" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917349 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c721db-bdc9-49ec-9380-5336da467219" containerName="extract-utilities" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917356 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c721db-bdc9-49ec-9380-5336da467219" containerName="extract-utilities" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917375 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917381 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917392 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerName="extract-utilities" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917397 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerName="extract-utilities" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917407 4835 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="72c721db-bdc9-49ec-9380-5336da467219" containerName="extract-content" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917430 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c721db-bdc9-49ec-9380-5336da467219" containerName="extract-content" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917440 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917446 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: E0202 17:37:38.917455 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerName="extract-content" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917461 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerName="extract-content" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917624 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="72c721db-bdc9-49ec-9380-5336da467219" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917635 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="98a36f40-50eb-4c5e-8b4e-7b15fef1fa0f" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917647 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f1ea5e5-fd19-47d0-9b6e-21b5682a87e0" containerName="registry-server" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.917662 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="224a86ad-9920-4e35-8470-e48d3af63934" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.918521 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.920162 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.920213 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.927413 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.928806 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.930435 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.939565 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.976853 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-dev\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.976921 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977005 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ef67b40a-7472-4011-95ad-4713b23bf160-ceph\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977027 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977081 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-dev\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977106 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977129 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-lib-modules\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977147 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977192 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977229 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977248 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-sys\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977266 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977322 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977358 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjz2q\" (UniqueName: \"kubernetes.io/projected/ef67b40a-7472-4011-95ad-4713b23bf160-kube-api-access-wjz2q\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977384 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977411 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977477 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977503 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkplk\" (UniqueName: 
\"kubernetes.io/projected/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-kube-api-access-nkplk\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977546 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-scripts\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977610 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977635 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977700 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977723 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-run\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977740 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977764 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977783 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977806 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-config-data\") 
pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977834 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977902 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-sys\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977945 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.977972 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-run\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.978001 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:38 crc kubenswrapper[4835]: I0202 17:37:38.978514 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085566 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085744 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjz2q\" (UniqueName: \"kubernetes.io/projected/ef67b40a-7472-4011-95ad-4713b23bf160-kube-api-access-wjz2q\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085774 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085799 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-locks-brick\") pod 
\"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085832 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085853 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkplk\" (UniqueName: \"kubernetes.io/projected/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-kube-api-access-nkplk\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085889 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-scripts\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085926 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085950 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.085988 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086008 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-run\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086028 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086052 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086064 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086073 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-config-data\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086119 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-run\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086131 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086146 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086186 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086115 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086206 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086185 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086303 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-sys\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086350 4835 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086380 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-run\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086410 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086410 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-sys\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086446 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-dev\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086462 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-run\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086473 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086492 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086549 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ef67b40a-7472-4011-95ad-4713b23bf160-ceph\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086566 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086568 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086605 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086617 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086633 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086616 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086647 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-dev\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086657 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-dev\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086678 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086692 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-lib-modules\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086706 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-dev\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086708 4835 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086747 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086785 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086804 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-sys\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086823 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.086981 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-lib-modules\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.087023 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ef67b40a-7472-4011-95ad-4713b23bf160-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.087052 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.087072 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-sys\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.091969 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 
17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.092387 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.092519 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.092711 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.093367 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.094090 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-config-data\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.094919 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.095484 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.097206 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef67b40a-7472-4011-95ad-4713b23bf160-scripts\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.104417 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkplk\" (UniqueName: \"kubernetes.io/projected/4b1fb0f8-db78-42d9-82e2-c0dcda0cd231-kube-api-access-nkplk\") pod \"cinder-volume-volume1-0\" (UID: \"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231\") " pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.106361 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ef67b40a-7472-4011-95ad-4713b23bf160-ceph\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " 
pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.106733 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjz2q\" (UniqueName: \"kubernetes.io/projected/ef67b40a-7472-4011-95ad-4713b23bf160-kube-api-access-wjz2q\") pod \"cinder-backup-0\" (UID: \"ef67b40a-7472-4011-95ad-4713b23bf160\") " pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.286167 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.311998 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.476560 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-f6xwq"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.478218 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.495221 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-f6xwq"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.577998 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-1a71-account-create-update-8g9t9"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.579596 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.585666 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.598518 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bccws\" (UniqueName: \"kubernetes.io/projected/5884d13a-333b-48c2-9e73-0f0e3369a932-kube-api-access-bccws\") pod \"manila-db-create-f6xwq\" (UID: \"5884d13a-333b-48c2-9e73-0f0e3369a932\") " pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.598622 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5884d13a-333b-48c2-9e73-0f0e3369a932-operator-scripts\") pod \"manila-db-create-f6xwq\" (UID: \"5884d13a-333b-48c2-9e73-0f0e3369a932\") " pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.607349 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7787d795cf-xvfgz"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.609551 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.614916 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.615187 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.615346 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.615457 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-llgmm" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.620385 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-1a71-account-create-update-8g9t9"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.635835 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7787d795cf-xvfgz"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.700225 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-scripts\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.700263 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-operator-scripts\") pod \"manila-1a71-account-create-update-8g9t9\" (UID: \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\") " pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.700301 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-config-data\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.700327 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps9db\" (UniqueName: \"kubernetes.io/projected/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-kube-api-access-ps9db\") pod \"manila-1a71-account-create-update-8g9t9\" (UID: \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\") " pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.700364 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b89e0428-ff08-413f-aad7-6686319cf0fd-horizon-secret-key\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.700400 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5884d13a-333b-48c2-9e73-0f0e3369a932-operator-scripts\") pod \"manila-db-create-f6xwq\" (UID: \"5884d13a-333b-48c2-9e73-0f0e3369a932\") " pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 
17:37:39.700428 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b89e0428-ff08-413f-aad7-6686319cf0fd-logs\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.700451 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj55q\" (UniqueName: \"kubernetes.io/projected/b89e0428-ff08-413f-aad7-6686319cf0fd-kube-api-access-rj55q\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.700514 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bccws\" (UniqueName: \"kubernetes.io/projected/5884d13a-333b-48c2-9e73-0f0e3369a932-kube-api-access-bccws\") pod \"manila-db-create-f6xwq\" (UID: \"5884d13a-333b-48c2-9e73-0f0e3369a932\") " pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.701540 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5884d13a-333b-48c2-9e73-0f0e3369a932-operator-scripts\") pod \"manila-db-create-f6xwq\" (UID: \"5884d13a-333b-48c2-9e73-0f0e3369a932\") " pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.715398 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.716910 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.724074 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7db7c95659-6vg2h"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.724347 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.724434 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.727477 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-pfsqd" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.727624 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.728486 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.735011 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7db7c95659-6vg2h"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.735941 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bccws\" (UniqueName: \"kubernetes.io/projected/5884d13a-333b-48c2-9e73-0f0e3369a932-kube-api-access-bccws\") pod \"manila-db-create-f6xwq\" (UID: \"5884d13a-333b-48c2-9e73-0f0e3369a932\") " pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.740492 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.777527 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.779602 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.782987 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.783011 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802472 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps9db\" (UniqueName: \"kubernetes.io/projected/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-kube-api-access-ps9db\") pod \"manila-1a71-account-create-update-8g9t9\" (UID: \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\") " pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802517 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-config-data\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802553 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b89e0428-ff08-413f-aad7-6686319cf0fd-horizon-secret-key\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802573 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802592 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86972842-42a5-46ce-b163-62b8f57571d3-logs\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802613 4835 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86972842-42a5-46ce-b163-62b8f57571d3-horizon-secret-key\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802633 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8zrg\" (UniqueName: \"kubernetes.io/projected/86972842-42a5-46ce-b163-62b8f57571d3-kube-api-access-q8zrg\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802657 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-ceph\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802675 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802689 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802706 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b89e0428-ff08-413f-aad7-6686319cf0fd-logs\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802722 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802741 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-logs\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802763 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj55q\" (UniqueName: \"kubernetes.io/projected/b89e0428-ff08-413f-aad7-6686319cf0fd-kube-api-access-rj55q\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802778 
4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzm4f\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-kube-api-access-kzm4f\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802796 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-scripts\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802827 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-scripts\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802851 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-config-data\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802911 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-scripts\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802929 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-operator-scripts\") pod \"manila-1a71-account-create-update-8g9t9\" (UID: \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\") " pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.802945 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-config-data\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.803976 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.804681 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b89e0428-ff08-413f-aad7-6686319cf0fd-logs\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.805530 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-scripts\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " 
pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.805863 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-operator-scripts\") pod \"manila-1a71-account-create-update-8g9t9\" (UID: \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\") " pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.806981 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b89e0428-ff08-413f-aad7-6686319cf0fd-horizon-secret-key\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.806994 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-config-data\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.819110 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.827983 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps9db\" (UniqueName: \"kubernetes.io/projected/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-kube-api-access-ps9db\") pod \"manila-1a71-account-create-update-8g9t9\" (UID: \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\") " pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.831634 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj55q\" (UniqueName: \"kubernetes.io/projected/b89e0428-ff08-413f-aad7-6686319cf0fd-kube-api-access-rj55q\") pod \"horizon-7787d795cf-xvfgz\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904662 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904718 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86972842-42a5-46ce-b163-62b8f57571d3-logs\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904744 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86972842-42a5-46ce-b163-62b8f57571d3-horizon-secret-key\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904764 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm45w\" (UniqueName: 
\"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-kube-api-access-qm45w\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904786 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8zrg\" (UniqueName: \"kubernetes.io/projected/86972842-42a5-46ce-b163-62b8f57571d3-kube-api-access-q8zrg\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904809 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904828 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-ceph\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904845 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904862 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904878 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904896 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-logs\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904917 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzm4f\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-kube-api-access-kzm4f\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904939 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-scripts\") pod 
\"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904958 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.904975 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.905005 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-scripts\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.905022 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.905046 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-config-data\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.905074 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.905106 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.905150 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.905174 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-config-data\") pod \"horizon-7db7c95659-6vg2h\" (UID: 
\"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.905201 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.911727 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-logs\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.912981 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.914049 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86972842-42a5-46ce-b163-62b8f57571d3-logs\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.914876 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.918705 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-scripts\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.919554 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-config-data\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.920298 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.921527 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.925183 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86972842-42a5-46ce-b163-62b8f57571d3-horizon-secret-key\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.929292 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-config-data\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.932189 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-ceph\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.933037 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-scripts\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.934550 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.936911 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8zrg\" (UniqueName: \"kubernetes.io/projected/86972842-42a5-46ce-b163-62b8f57571d3-kube-api-access-q8zrg\") pod \"horizon-7db7c95659-6vg2h\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.938955 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzm4f\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-kube-api-access-kzm4f\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.944830 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:37:39 crc kubenswrapper[4835]: I0202 17:37:39.979636 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006707 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006757 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006802 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006836 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006866 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm45w\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-kube-api-access-qm45w\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006885 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006927 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006942 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.006971 4835 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.007400 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.007597 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.010347 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.014846 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.017677 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.019984 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.020599 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.032137 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.042676 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.043412 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm45w\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-kube-api-access-qm45w\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.059800 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.063481 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.116137 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.271866 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.438382 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.480804 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-f6xwq"] Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.532849 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-1a71-account-create-update-8g9t9"] Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.744259 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-f6xwq" event={"ID":"5884d13a-333b-48c2-9e73-0f0e3369a932","Type":"ContainerStarted","Data":"9268fe0dfdcb8f901b3039975117c4b85d315554c3a3cd1bfb0857c841b306af"} Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.752329 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-1a71-account-create-update-8g9t9" event={"ID":"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60","Type":"ContainerStarted","Data":"490f3467aa3c78ccfd60523db4df8f89ce9d16bb445fe8ef18eba8ae3b554802"} Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.752371 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-1a71-account-create-update-8g9t9" event={"ID":"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60","Type":"ContainerStarted","Data":"7bd17387b0836a4fc081956b1f2fef4b53acaf82bdb8b195dbed5fa6d2453108"} Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.754006 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ef67b40a-7472-4011-95ad-4713b23bf160","Type":"ContainerStarted","Data":"d34df6fbf453ee8575d0f6b99a772b83144b8a76d07e214a99bc86205543e232"} Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.768333 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231","Type":"ContainerStarted","Data":"a8c9c001c13cf8b177b3bbe51de7f61cfb2333ffe66b898b1e2601b9be169ebf"} Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.777698 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/horizon-7787d795cf-xvfgz"] Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.782787 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-1a71-account-create-update-8g9t9" podStartSLOduration=1.782769333 podStartE2EDuration="1.782769333s" podCreationTimestamp="2026-02-02 17:37:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:37:40.779401788 +0000 UTC m=+2852.401005888" watchObservedRunningTime="2026-02-02 17:37:40.782769333 +0000 UTC m=+2852.404373423" Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.849253 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7db7c95659-6vg2h"] Feb 02 17:37:40 crc kubenswrapper[4835]: I0202 17:37:40.946729 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.030739 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.802416 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db7c95659-6vg2h" event={"ID":"86972842-42a5-46ce-b163-62b8f57571d3","Type":"ContainerStarted","Data":"e2e3489ec76605f638987443a804f2f0851b3f6d170ef4e4fa3fdfc0796a9d77"} Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.815760 4835 generic.go:334] "Generic (PLEG): container finished" podID="5884d13a-333b-48c2-9e73-0f0e3369a932" containerID="c5230a66094f067556c747acab5b969e6dd2e93843de54cadd0468bfb996aa39" exitCode=0 Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.815877 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-f6xwq" event={"ID":"5884d13a-333b-48c2-9e73-0f0e3369a932","Type":"ContainerDied","Data":"c5230a66094f067556c747acab5b969e6dd2e93843de54cadd0468bfb996aa39"} Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.823039 4835 generic.go:334] "Generic (PLEG): container finished" podID="e6cf38f7-c12c-455a-a5f8-7f1d797f9a60" containerID="490f3467aa3c78ccfd60523db4df8f89ce9d16bb445fe8ef18eba8ae3b554802" exitCode=0 Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.823094 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-1a71-account-create-update-8g9t9" event={"ID":"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60","Type":"ContainerDied","Data":"490f3467aa3c78ccfd60523db4df8f89ce9d16bb445fe8ef18eba8ae3b554802"} Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.833284 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eeeb4621-9a31-4c57-9c26-365af4a73cd7","Type":"ContainerStarted","Data":"66f4ecb9402ec3ecbcb24e7a275f6c2f5e7e1aacbba490ad7d567fc70807c8b9"} Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.869251 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7787d795cf-xvfgz" event={"ID":"b89e0428-ff08-413f-aad7-6686319cf0fd","Type":"ContainerStarted","Data":"2cbb5faed0f216b2ebd5ac55d622e186fa42db5efc09c16c93ea6ef67214e608"} Feb 02 17:37:41 crc kubenswrapper[4835]: I0202 17:37:41.875247 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2062c8b-2745-48ab-a93c-a8dd1847f0b9","Type":"ContainerStarted","Data":"bd9f7627ecb095e339bbf52b3a9b816c45cfd07880b8ad8cc6de22b03f6da3da"} Feb 02 17:37:42 crc kubenswrapper[4835]: 
I0202 17:37:42.343603 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7787d795cf-xvfgz"] Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.390851 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-56ddff97fb-66qgb"] Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.392463 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.396571 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.403039 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56ddff97fb-66qgb"] Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.421680 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.432752 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7db7c95659-6vg2h"] Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.469746 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5f575cdbb6-2fppg"] Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.476073 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.490502 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-config-data\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.490541 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-combined-ca-bundle\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.490615 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzqpq\" (UniqueName: \"kubernetes.io/projected/22b32279-2087-4a32-84ac-38c8b84d6a4d-kube-api-access-gzqpq\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.490660 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-secret-key\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.490688 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22b32279-2087-4a32-84ac-38c8b84d6a4d-logs\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.490717 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-scripts\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.490743 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-tls-certs\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.515945 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f575cdbb6-2fppg"] Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.551146 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592025 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-horizon-tls-certs\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592081 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzqpq\" (UniqueName: \"kubernetes.io/projected/22b32279-2087-4a32-84ac-38c8b84d6a4d-kube-api-access-gzqpq\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592105 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk498\" (UniqueName: \"kubernetes.io/projected/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-kube-api-access-fk498\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592130 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-horizon-secret-key\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592560 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-secret-key\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592807 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22b32279-2087-4a32-84ac-38c8b84d6a4d-logs\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592836 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-logs\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592967 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-scripts\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.592995 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-tls-certs\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.593145 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-scripts\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.593183 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-config-data\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.593534 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-config-data\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.593558 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-combined-ca-bundle\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.593682 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-combined-ca-bundle\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.595144 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-config-data\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.595410 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22b32279-2087-4a32-84ac-38c8b84d6a4d-logs\") pod \"horizon-56ddff97fb-66qgb\" (UID: 
\"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.595799 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-scripts\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.600381 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-secret-key\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.602683 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-tls-certs\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.609663 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-combined-ca-bundle\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.610122 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzqpq\" (UniqueName: \"kubernetes.io/projected/22b32279-2087-4a32-84ac-38c8b84d6a4d-kube-api-access-gzqpq\") pod \"horizon-56ddff97fb-66qgb\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.694064 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.695516 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk498\" (UniqueName: \"kubernetes.io/projected/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-kube-api-access-fk498\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.695581 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-horizon-secret-key\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.695963 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-logs\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.696052 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-scripts\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.696106 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-config-data\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.696141 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-combined-ca-bundle\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.696229 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-horizon-tls-certs\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.697518 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-logs\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.698537 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-config-data\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.698888 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-scripts\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.701568 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-horizon-tls-certs\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.701796 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-combined-ca-bundle\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.708943 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-horizon-secret-key\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.713015 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk498\" (UniqueName: \"kubernetes.io/projected/fec30fb3-23dc-4443-a90f-4fb8defb3a1f-kube-api-access-fk498\") pod \"horizon-5f575cdbb6-2fppg\" (UID: \"fec30fb3-23dc-4443-a90f-4fb8defb3a1f\") " pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.930766 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231","Type":"ContainerStarted","Data":"471a2e8a367420da5ba5dd9abfd35a8fb848d3bc7d610e56ec4c8b25a8dbe188"} Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.931048 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"4b1fb0f8-db78-42d9-82e2-c0dcda0cd231","Type":"ContainerStarted","Data":"e6c76a426036dc9a2fdef49ca8146c3ef0813c9c105285b26042d2a18e435787"} Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.936716 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2062c8b-2745-48ab-a93c-a8dd1847f0b9","Type":"ContainerStarted","Data":"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40"} Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.936750 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2062c8b-2745-48ab-a93c-a8dd1847f0b9","Type":"ContainerStarted","Data":"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8"} Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.936855 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerName="glance-log" containerID="cri-o://53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8" gracePeriod=30 Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.937053 4835 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-internal-api-0" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerName="glance-httpd" containerID="cri-o://84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40" gracePeriod=30 Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.956330 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=4.041821416 podStartE2EDuration="4.956315273s" podCreationTimestamp="2026-02-02 17:37:38 +0000 UTC" firstStartedPulling="2026-02-02 17:37:40.471212793 +0000 UTC m=+2852.092816873" lastFinishedPulling="2026-02-02 17:37:41.38570666 +0000 UTC m=+2853.007310730" observedRunningTime="2026-02-02 17:37:42.953198075 +0000 UTC m=+2854.574802155" watchObservedRunningTime="2026-02-02 17:37:42.956315273 +0000 UTC m=+2854.577919373" Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.971662 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ef67b40a-7472-4011-95ad-4713b23bf160","Type":"ContainerStarted","Data":"6958f1ceac3c9b276f9ae1202f9407af0f15924c10107603015eddd810617881"} Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.971709 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ef67b40a-7472-4011-95ad-4713b23bf160","Type":"ContainerStarted","Data":"5a0868817efb075e1a569042503e9330f7b873e39dd568263edc97ed63efc97e"} Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.976714 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eeeb4621-9a31-4c57-9c26-365af4a73cd7","Type":"ContainerStarted","Data":"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351"} Feb 02 17:37:42 crc kubenswrapper[4835]: I0202 17:37:42.994998 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.99497457 podStartE2EDuration="3.99497457s" podCreationTimestamp="2026-02-02 17:37:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:37:42.977911946 +0000 UTC m=+2854.599516026" watchObservedRunningTime="2026-02-02 17:37:42.99497457 +0000 UTC m=+2854.616578650" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.003218 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.060994 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=4.034899151 podStartE2EDuration="5.060974683s" podCreationTimestamp="2026-02-02 17:37:38 +0000 UTC" firstStartedPulling="2026-02-02 17:37:40.358037612 +0000 UTC m=+2851.979641692" lastFinishedPulling="2026-02-02 17:37:41.384113124 +0000 UTC m=+2853.005717224" observedRunningTime="2026-02-02 17:37:43.010207752 +0000 UTC m=+2854.631811822" watchObservedRunningTime="2026-02-02 17:37:43.060974683 +0000 UTC m=+2854.682578763" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.069154 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56ddff97fb-66qgb"] Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.576151 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.592006 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.615236 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps9db\" (UniqueName: \"kubernetes.io/projected/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-kube-api-access-ps9db\") pod \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\" (UID: \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.615460 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-operator-scripts\") pod \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\" (UID: \"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.616638 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e6cf38f7-c12c-455a-a5f8-7f1d797f9a60" (UID: "e6cf38f7-c12c-455a-a5f8-7f1d797f9a60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.625606 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-kube-api-access-ps9db" (OuterVolumeSpecName: "kube-api-access-ps9db") pod "e6cf38f7-c12c-455a-a5f8-7f1d797f9a60" (UID: "e6cf38f7-c12c-455a-a5f8-7f1d797f9a60"). InnerVolumeSpecName "kube-api-access-ps9db". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.675821 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f575cdbb6-2fppg"] Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.717314 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bccws\" (UniqueName: \"kubernetes.io/projected/5884d13a-333b-48c2-9e73-0f0e3369a932-kube-api-access-bccws\") pod \"5884d13a-333b-48c2-9e73-0f0e3369a932\" (UID: \"5884d13a-333b-48c2-9e73-0f0e3369a932\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.717416 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5884d13a-333b-48c2-9e73-0f0e3369a932-operator-scripts\") pod \"5884d13a-333b-48c2-9e73-0f0e3369a932\" (UID: \"5884d13a-333b-48c2-9e73-0f0e3369a932\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.717855 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps9db\" (UniqueName: \"kubernetes.io/projected/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-kube-api-access-ps9db\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.717866 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.718474 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5884d13a-333b-48c2-9e73-0f0e3369a932-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5884d13a-333b-48c2-9e73-0f0e3369a932" (UID: "5884d13a-333b-48c2-9e73-0f0e3369a932"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.723283 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5884d13a-333b-48c2-9e73-0f0e3369a932-kube-api-access-bccws" (OuterVolumeSpecName: "kube-api-access-bccws") pod "5884d13a-333b-48c2-9e73-0f0e3369a932" (UID: "5884d13a-333b-48c2-9e73-0f0e3369a932"). InnerVolumeSpecName "kube-api-access-bccws". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.775666 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.818787 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-ceph\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.818844 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-config-data\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.818862 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-logs\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.818912 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-internal-tls-certs\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.818937 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.818987 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-httpd-run\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.819008 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-scripts\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.819027 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm45w\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-kube-api-access-qm45w\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.819137 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-combined-ca-bundle\") pod \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\" (UID: \"f2062c8b-2745-48ab-a93c-a8dd1847f0b9\") " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.819544 4835 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5884d13a-333b-48c2-9e73-0f0e3369a932-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.819558 4835 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-bccws\" (UniqueName: \"kubernetes.io/projected/5884d13a-333b-48c2-9e73-0f0e3369a932-kube-api-access-bccws\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.821665 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.822819 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-logs" (OuterVolumeSpecName: "logs") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.828483 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-scripts" (OuterVolumeSpecName: "scripts") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.834623 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.838079 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-ceph" (OuterVolumeSpecName: "ceph") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.841819 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-kube-api-access-qm45w" (OuterVolumeSpecName: "kube-api-access-qm45w") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "kube-api-access-qm45w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.867852 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.917381 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.922050 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.922081 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.922093 4835 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.922131 4835 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.922144 4835 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.922154 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.922165 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm45w\" (UniqueName: \"kubernetes.io/projected/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-kube-api-access-qm45w\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.922178 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.933044 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-config-data" (OuterVolumeSpecName: "config-data") pod "f2062c8b-2745-48ab-a93c-a8dd1847f0b9" (UID: "f2062c8b-2745-48ab-a93c-a8dd1847f0b9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:43 crc kubenswrapper[4835]: I0202 17:37:43.948967 4835 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.002838 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eeeb4621-9a31-4c57-9c26-365af4a73cd7","Type":"ContainerStarted","Data":"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06"} Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.002959 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerName="glance-log" containerID="cri-o://95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351" gracePeriod=30 Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.002993 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerName="glance-httpd" containerID="cri-o://80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06" gracePeriod=30 Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.017529 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56ddff97fb-66qgb" event={"ID":"22b32279-2087-4a32-84ac-38c8b84d6a4d","Type":"ContainerStarted","Data":"bc6a4c1441995e1acab1285b4f85666e78c0342aa8e3adbb5dec578edad0c90e"} Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.023553 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2062c8b-2745-48ab-a93c-a8dd1847f0b9-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.023585 4835 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.023931 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f575cdbb6-2fppg" event={"ID":"fec30fb3-23dc-4443-a90f-4fb8defb3a1f","Type":"ContainerStarted","Data":"57651514747db80da4b5739bd0a1d3663756366e73ac8c72ee38bb608bead058"} Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.035499 4835 generic.go:334] "Generic (PLEG): container finished" podID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerID="84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40" exitCode=143 Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.035547 4835 generic.go:334] "Generic (PLEG): container finished" podID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerID="53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8" exitCode=143 Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.035655 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2062c8b-2745-48ab-a93c-a8dd1847f0b9","Type":"ContainerDied","Data":"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40"} Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.035688 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"f2062c8b-2745-48ab-a93c-a8dd1847f0b9","Type":"ContainerDied","Data":"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8"} Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.035703 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2062c8b-2745-48ab-a93c-a8dd1847f0b9","Type":"ContainerDied","Data":"bd9f7627ecb095e339bbf52b3a9b816c45cfd07880b8ad8cc6de22b03f6da3da"} Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.035721 4835 scope.go:117] "RemoveContainer" containerID="84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.035896 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.046334 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.04631589 podStartE2EDuration="5.04631589s" podCreationTimestamp="2026-02-02 17:37:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:37:44.022605368 +0000 UTC m=+2855.644209468" watchObservedRunningTime="2026-02-02 17:37:44.04631589 +0000 UTC m=+2855.667919970" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.046674 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-f6xwq" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.046826 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-f6xwq" event={"ID":"5884d13a-333b-48c2-9e73-0f0e3369a932","Type":"ContainerDied","Data":"9268fe0dfdcb8f901b3039975117c4b85d315554c3a3cd1bfb0857c841b306af"} Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.046875 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9268fe0dfdcb8f901b3039975117c4b85d315554c3a3cd1bfb0857c841b306af" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.050700 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-1a71-account-create-update-8g9t9" event={"ID":"e6cf38f7-c12c-455a-a5f8-7f1d797f9a60","Type":"ContainerDied","Data":"7bd17387b0836a4fc081956b1f2fef4b53acaf82bdb8b195dbed5fa6d2453108"} Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.050743 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bd17387b0836a4fc081956b1f2fef4b53acaf82bdb8b195dbed5fa6d2453108" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.051117 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-1a71-account-create-update-8g9t9" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.094586 4835 scope.go:117] "RemoveContainer" containerID="53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.117031 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.129256 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.142953 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:44 crc kubenswrapper[4835]: E0202 17:37:44.143507 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerName="glance-log" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.143525 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerName="glance-log" Feb 02 17:37:44 crc kubenswrapper[4835]: E0202 17:37:44.143539 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerName="glance-httpd" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.143547 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerName="glance-httpd" Feb 02 17:37:44 crc kubenswrapper[4835]: E0202 17:37:44.143570 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6cf38f7-c12c-455a-a5f8-7f1d797f9a60" containerName="mariadb-account-create-update" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.143578 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6cf38f7-c12c-455a-a5f8-7f1d797f9a60" containerName="mariadb-account-create-update" Feb 02 17:37:44 crc kubenswrapper[4835]: E0202 17:37:44.143592 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5884d13a-333b-48c2-9e73-0f0e3369a932" containerName="mariadb-database-create" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.143599 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5884d13a-333b-48c2-9e73-0f0e3369a932" containerName="mariadb-database-create" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.143815 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6cf38f7-c12c-455a-a5f8-7f1d797f9a60" containerName="mariadb-account-create-update" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.143840 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5884d13a-333b-48c2-9e73-0f0e3369a932" containerName="mariadb-database-create" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.143854 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerName="glance-httpd" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.143868 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" containerName="glance-log" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.145005 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.145588 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.148709 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.153623 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.236092 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.236151 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.236181 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-ceph\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.236239 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.236327 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-logs\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.248328 4835 scope.go:117] "RemoveContainer" containerID="84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.248367 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvhbh\" (UniqueName: \"kubernetes.io/projected/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-kube-api-access-jvhbh\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: E0202 17:37:44.248862 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40\": container with ID starting with 84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40 not found: ID does not exist" 
containerID="84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.248888 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.248895 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40"} err="failed to get container status \"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40\": rpc error: code = NotFound desc = could not find container \"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40\": container with ID starting with 84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40 not found: ID does not exist" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.248915 4835 scope.go:117] "RemoveContainer" containerID="53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.248936 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.248960 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: E0202 17:37:44.249589 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8\": container with ID starting with 53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8 not found: ID does not exist" containerID="53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.249634 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8"} err="failed to get container status \"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8\": rpc error: code = NotFound desc = could not find container \"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8\": container with ID starting with 53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8 not found: ID does not exist" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.249656 4835 scope.go:117] "RemoveContainer" containerID="84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.253084 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40"} 
err="failed to get container status \"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40\": rpc error: code = NotFound desc = could not find container \"84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40\": container with ID starting with 84d94be374d2d5fcda53147b7db421642a430ef9376099058fc96b2813ba0a40 not found: ID does not exist" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.253112 4835 scope.go:117] "RemoveContainer" containerID="53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.257401 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8"} err="failed to get container status \"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8\": rpc error: code = NotFound desc = could not find container \"53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8\": container with ID starting with 53d090265ab0fcb0fb916f43e5bda9bdd56435b9804aacfbccaa872ec41b54d8 not found: ID does not exist" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.287539 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.312908 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.350705 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.350903 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.350982 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.351001 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.351018 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-ceph\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.351052 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.351104 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-logs\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.351132 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvhbh\" (UniqueName: \"kubernetes.io/projected/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-kube-api-access-jvhbh\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.351210 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.352224 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.353106 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-logs\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.353842 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.362136 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.362424 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.363761 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-ceph\") pod 
\"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.364710 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.365080 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.371382 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvhbh\" (UniqueName: \"kubernetes.io/projected/7f0c5f67-a208-4b73-9f8b-c924d61cdf9e-kube-api-access-jvhbh\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.449643 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e\") " pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.557150 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.800123 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.862875 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.863041 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzm4f\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-kube-api-access-kzm4f\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.863082 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-public-tls-certs\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.863105 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-combined-ca-bundle\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.863142 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-config-data\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.863195 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-ceph\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.863218 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-scripts\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.863235 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-httpd-run\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.863322 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-logs\") pod \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\" (UID: \"eeeb4621-9a31-4c57-9c26-365af4a73cd7\") " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.864057 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-logs" (OuterVolumeSpecName: "logs") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.871659 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.871710 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.878777 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.879591 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-scripts" (OuterVolumeSpecName: "scripts") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.890510 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.890952 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-ceph" (OuterVolumeSpecName: "ceph") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.908691 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-kube-api-access-kzm4f" (OuterVolumeSpecName: "kube-api-access-kzm4f") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "kube-api-access-kzm4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.954474 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.964937 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.967480 4835 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.967585 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeeb4621-9a31-4c57-9c26-365af4a73cd7-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.967777 4835 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.970463 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzm4f\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-kube-api-access-kzm4f\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.970568 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.970644 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eeeb4621-9a31-4c57-9c26-365af4a73cd7-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:44 crc kubenswrapper[4835]: I0202 17:37:44.982347 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-config-data" (OuterVolumeSpecName: "config-data") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.005791 4835 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.025660 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "eeeb4621-9a31-4c57-9c26-365af4a73cd7" (UID: "eeeb4621-9a31-4c57-9c26-365af4a73cd7"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.067514 4835 generic.go:334] "Generic (PLEG): container finished" podID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerID="80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06" exitCode=0 Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.067542 4835 generic.go:334] "Generic (PLEG): container finished" podID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerID="95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351" exitCode=143 Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.067586 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eeeb4621-9a31-4c57-9c26-365af4a73cd7","Type":"ContainerDied","Data":"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06"} Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.067613 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eeeb4621-9a31-4c57-9c26-365af4a73cd7","Type":"ContainerDied","Data":"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351"} Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.067623 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eeeb4621-9a31-4c57-9c26-365af4a73cd7","Type":"ContainerDied","Data":"66f4ecb9402ec3ecbcb24e7a275f6c2f5e7e1aacbba490ad7d567fc70807c8b9"} Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.067638 4835 scope.go:117] "RemoveContainer" containerID="80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.067630 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.071936 4835 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.071957 4835 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.071966 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeeb4621-9a31-4c57-9c26-365af4a73cd7-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.119303 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.139959 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.144341 4835 scope.go:117] "RemoveContainer" containerID="95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.154898 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:45 crc kubenswrapper[4835]: E0202 17:37:45.155236 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerName="glance-log" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.155254 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerName="glance-log" Feb 02 17:37:45 crc kubenswrapper[4835]: E0202 17:37:45.155296 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerName="glance-httpd" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.155305 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerName="glance-httpd" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.155475 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerName="glance-httpd" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.155493 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" containerName="glance-log" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.160388 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.162626 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.163287 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.167926 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.210812 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeeb4621-9a31-4c57-9c26-365af4a73cd7" path="/var/lib/kubelet/pods/eeeb4621-9a31-4c57-9c26-365af4a73cd7/volumes" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.211497 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2062c8b-2745-48ab-a93c-a8dd1847f0b9" path="/var/lib/kubelet/pods/f2062c8b-2745-48ab-a93c-a8dd1847f0b9/volumes" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.212444 4835 scope.go:117] "RemoveContainer" containerID="80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06" Feb 02 17:37:45 crc kubenswrapper[4835]: E0202 17:37:45.212970 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06\": container with ID starting with 80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06 not found: ID does not exist" containerID="80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.213000 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06"} err="failed to get container status \"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06\": rpc error: code = NotFound desc = could not find container \"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06\": container with ID starting with 80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06 not found: ID does not exist" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.213020 4835 scope.go:117] "RemoveContainer" containerID="95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351" Feb 02 17:37:45 crc kubenswrapper[4835]: E0202 17:37:45.214268 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351\": container with ID starting with 95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351 not found: ID does not exist" containerID="95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.214312 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351"} err="failed to get container status \"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351\": rpc error: code = NotFound desc = could not find container \"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351\": container with ID starting with 95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351 not found: ID does 
not exist" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.214327 4835 scope.go:117] "RemoveContainer" containerID="80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.214631 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06"} err="failed to get container status \"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06\": rpc error: code = NotFound desc = could not find container \"80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06\": container with ID starting with 80a6260add8f01f3a2232fc19501241914c744c1a18cdb38fed0d42fa97ddf06 not found: ID does not exist" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.214650 4835 scope.go:117] "RemoveContainer" containerID="95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.216536 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351"} err="failed to get container status \"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351\": rpc error: code = NotFound desc = could not find container \"95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351\": container with ID starting with 95ae4b265d5eb4b795f8861f84e2767c486d6ef8ff083485ee7edb9985ea7351 not found: ID does not exist" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.276595 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.276646 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0454a882-7982-44f7-8f83-3be157de886a-ceph\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.276818 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.276961 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0454a882-7982-44f7-8f83-3be157de886a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.277134 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-scripts\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc 
kubenswrapper[4835]: I0202 17:37:45.277170 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-config-data\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.277188 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.277266 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfn86\" (UniqueName: \"kubernetes.io/projected/0454a882-7982-44f7-8f83-3be157de886a-kube-api-access-lfn86\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.277504 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0454a882-7982-44f7-8f83-3be157de886a-logs\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.362561 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 17:37:45 crc kubenswrapper[4835]: W0202 17:37:45.370802 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f0c5f67_a208_4b73_9f8b_c924d61cdf9e.slice/crio-d6de9972ddaad0c46cb249ed76ea304e0ea17bae5dd83642868e45ebfca6ae52 WatchSource:0}: Error finding container d6de9972ddaad0c46cb249ed76ea304e0ea17bae5dd83642868e45ebfca6ae52: Status 404 returned error can't find the container with id d6de9972ddaad0c46cb249ed76ea304e0ea17bae5dd83642868e45ebfca6ae52 Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.380267 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0454a882-7982-44f7-8f83-3be157de886a-logs\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.380396 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.380436 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0454a882-7982-44f7-8f83-3be157de886a-ceph\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.380477 4835 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.381888 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0454a882-7982-44f7-8f83-3be157de886a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.382035 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-scripts\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.382063 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-config-data\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.382083 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.382120 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfn86\" (UniqueName: \"kubernetes.io/projected/0454a882-7982-44f7-8f83-3be157de886a-kube-api-access-lfn86\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.381308 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0454a882-7982-44f7-8f83-3be157de886a-logs\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.380969 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.386062 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0454a882-7982-44f7-8f83-3be157de886a-ceph\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.386070 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/0454a882-7982-44f7-8f83-3be157de886a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.395325 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-config-data\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.395355 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-scripts\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.397408 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.397578 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0454a882-7982-44f7-8f83-3be157de886a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.420410 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfn86\" (UniqueName: \"kubernetes.io/projected/0454a882-7982-44f7-8f83-3be157de886a-kube-api-access-lfn86\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.444544 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"0454a882-7982-44f7-8f83-3be157de886a\") " pod="openstack/glance-default-external-api-0" Feb 02 17:37:45 crc kubenswrapper[4835]: I0202 17:37:45.491394 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 17:37:46 crc kubenswrapper[4835]: I0202 17:37:46.089494 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e","Type":"ContainerStarted","Data":"d6de9972ddaad0c46cb249ed76ea304e0ea17bae5dd83642868e45ebfca6ae52"} Feb 02 17:37:46 crc kubenswrapper[4835]: I0202 17:37:46.185047 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 17:37:46 crc kubenswrapper[4835]: W0202 17:37:46.198385 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0454a882_7982_44f7_8f83_3be157de886a.slice/crio-84920bb78bbfe58c6ba0cab30eb09f4ffac213bb696d8d764750daecc38af794 WatchSource:0}: Error finding container 84920bb78bbfe58c6ba0cab30eb09f4ffac213bb696d8d764750daecc38af794: Status 404 returned error can't find the container with id 84920bb78bbfe58c6ba0cab30eb09f4ffac213bb696d8d764750daecc38af794 Feb 02 17:37:47 crc kubenswrapper[4835]: I0202 17:37:47.148677 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e","Type":"ContainerStarted","Data":"0b4a345ba852ec2b3536c09a0366f94568fbacdedaab1ba7bd7490b83d6fae2b"} Feb 02 17:37:47 crc kubenswrapper[4835]: I0202 17:37:47.151422 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0454a882-7982-44f7-8f83-3be157de886a","Type":"ContainerStarted","Data":"7a58dbcbea44bb9efb3c463af166510cdfcf32fec9dd9bc44816b0593eaf4abd"} Feb 02 17:37:47 crc kubenswrapper[4835]: I0202 17:37:47.151469 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0454a882-7982-44f7-8f83-3be157de886a","Type":"ContainerStarted","Data":"84920bb78bbfe58c6ba0cab30eb09f4ffac213bb696d8d764750daecc38af794"} Feb 02 17:37:48 crc kubenswrapper[4835]: I0202 17:37:48.184108 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7f0c5f67-a208-4b73-9f8b-c924d61cdf9e","Type":"ContainerStarted","Data":"5b5259d27271230d250ad0de0cabfed46783a3d3bb406dc88ae97eb78d4ab41c"} Feb 02 17:37:48 crc kubenswrapper[4835]: I0202 17:37:48.189625 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0454a882-7982-44f7-8f83-3be157de886a","Type":"ContainerStarted","Data":"db8b138ba773f76c4e391d420af9f30da5a5b4cdff22572583cf06249a53ed1a"} Feb 02 17:37:48 crc kubenswrapper[4835]: I0202 17:37:48.232575 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.232552976 podStartE2EDuration="4.232552976s" podCreationTimestamp="2026-02-02 17:37:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:37:48.231391403 +0000 UTC m=+2859.852995483" watchObservedRunningTime="2026-02-02 17:37:48.232552976 +0000 UTC m=+2859.854157066" Feb 02 17:37:48 crc kubenswrapper[4835]: I0202 17:37:48.294767 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.294740911 podStartE2EDuration="3.294740911s" podCreationTimestamp="2026-02-02 17:37:45 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:37:48.283727668 +0000 UTC m=+2859.905331748" watchObservedRunningTime="2026-02-02 17:37:48.294740911 +0000 UTC m=+2859.916344991" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.509786 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.520648 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.794983 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-xtzts"] Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.796501 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.798657 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-b4mbg" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.798780 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.809491 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-xtzts"] Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.885887 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-config-data\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.885951 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-combined-ca-bundle\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.885971 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-job-config-data\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.885999 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgxgl\" (UniqueName: \"kubernetes.io/projected/1664af25-6941-454e-9a16-2f27b62d4433-kube-api-access-cgxgl\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.987648 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-config-data\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.987715 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-combined-ca-bundle\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.987738 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-job-config-data\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.987770 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgxgl\" (UniqueName: \"kubernetes.io/projected/1664af25-6941-454e-9a16-2f27b62d4433-kube-api-access-cgxgl\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:49 crc kubenswrapper[4835]: I0202 17:37:49.996702 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-job-config-data\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:50 crc kubenswrapper[4835]: I0202 17:37:50.004217 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgxgl\" (UniqueName: \"kubernetes.io/projected/1664af25-6941-454e-9a16-2f27b62d4433-kube-api-access-cgxgl\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:50 crc kubenswrapper[4835]: I0202 17:37:50.009894 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-config-data\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:50 crc kubenswrapper[4835]: I0202 17:37:50.012734 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-combined-ca-bundle\") pod \"manila-db-sync-xtzts\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:50 crc kubenswrapper[4835]: I0202 17:37:50.127883 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-xtzts" Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.297854 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db7c95659-6vg2h" event={"ID":"86972842-42a5-46ce-b163-62b8f57571d3","Type":"ContainerStarted","Data":"b14a9001a4a0e9a0883b318471220f2b7d9a6ece766954c7255dc3cc693e6bec"} Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.305140 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56ddff97fb-66qgb" event={"ID":"22b32279-2087-4a32-84ac-38c8b84d6a4d","Type":"ContainerStarted","Data":"586adfcb28f1300cc5d1b9ca3414d9043ae073502e1c357412494828fd854505"} Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.308389 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f575cdbb6-2fppg" event={"ID":"fec30fb3-23dc-4443-a90f-4fb8defb3a1f","Type":"ContainerStarted","Data":"61d16d70c2064675c5903cbbda9c2c885867185481a4d5abee42b8b63bfc6057"} Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.309928 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7787d795cf-xvfgz" event={"ID":"b89e0428-ff08-413f-aad7-6686319cf0fd","Type":"ContainerStarted","Data":"3d32bc87b05b384ce9e37217abf2604df63c4e3ae386fefadae1670b8fcd0905"} Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.381858 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-xtzts"] Feb 02 17:37:54 crc kubenswrapper[4835]: W0202 17:37:54.409129 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1664af25_6941_454e_9a16_2f27b62d4433.slice/crio-ad4808761a81908c3ca2f9ded88de9065c42e111e12595fe6f05fba435f53803 WatchSource:0}: Error finding container ad4808761a81908c3ca2f9ded88de9065c42e111e12595fe6f05fba435f53803: Status 404 returned error can't find the container with id ad4808761a81908c3ca2f9ded88de9065c42e111e12595fe6f05fba435f53803 Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.558551 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.558996 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.605919 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:54 crc kubenswrapper[4835]: I0202 17:37:54.606616 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.338407 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f575cdbb6-2fppg" event={"ID":"fec30fb3-23dc-4443-a90f-4fb8defb3a1f","Type":"ContainerStarted","Data":"091c647d7fcf42100b502244519b9fbe415d238e24f3cf8e92d9767c2b613c11"} Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.342862 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7787d795cf-xvfgz" event={"ID":"b89e0428-ff08-413f-aad7-6686319cf0fd","Type":"ContainerStarted","Data":"c5261cacd77fb17ee57d4cd4cc8eb2afcf9f859db0355ec1da50e2bf72223f12"} Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.342949 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7787d795cf-xvfgz" 
podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerName="horizon-log" containerID="cri-o://3d32bc87b05b384ce9e37217abf2604df63c4e3ae386fefadae1670b8fcd0905" gracePeriod=30 Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.342983 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7787d795cf-xvfgz" podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerName="horizon" containerID="cri-o://c5261cacd77fb17ee57d4cd4cc8eb2afcf9f859db0355ec1da50e2bf72223f12" gracePeriod=30 Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.346700 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db7c95659-6vg2h" event={"ID":"86972842-42a5-46ce-b163-62b8f57571d3","Type":"ContainerStarted","Data":"0a25ec9dab326d88c4484560939e7ed123471fdb14773ad4d7a63c8d1d199fe9"} Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.346855 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7db7c95659-6vg2h" podUID="86972842-42a5-46ce-b163-62b8f57571d3" containerName="horizon-log" containerID="cri-o://b14a9001a4a0e9a0883b318471220f2b7d9a6ece766954c7255dc3cc693e6bec" gracePeriod=30 Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.346936 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7db7c95659-6vg2h" podUID="86972842-42a5-46ce-b163-62b8f57571d3" containerName="horizon" containerID="cri-o://0a25ec9dab326d88c4484560939e7ed123471fdb14773ad4d7a63c8d1d199fe9" gracePeriod=30 Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.350386 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-xtzts" event={"ID":"1664af25-6941-454e-9a16-2f27b62d4433","Type":"ContainerStarted","Data":"ad4808761a81908c3ca2f9ded88de9065c42e111e12595fe6f05fba435f53803"} Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.359380 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56ddff97fb-66qgb" event={"ID":"22b32279-2087-4a32-84ac-38c8b84d6a4d","Type":"ContainerStarted","Data":"b4a83f70f2990b82ef14ada3b958133f1bf6cc74c4248b3aaa4bf89501b57bef"} Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.359419 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.359429 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.371840 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5f575cdbb6-2fppg" podStartSLOduration=3.19882838 podStartE2EDuration="13.37182143s" podCreationTimestamp="2026-02-02 17:37:42 +0000 UTC" firstStartedPulling="2026-02-02 17:37:43.709626657 +0000 UTC m=+2855.331230737" lastFinishedPulling="2026-02-02 17:37:53.882619707 +0000 UTC m=+2865.504223787" observedRunningTime="2026-02-02 17:37:55.365588583 +0000 UTC m=+2866.987192683" watchObservedRunningTime="2026-02-02 17:37:55.37182143 +0000 UTC m=+2866.993425510" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.394421 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7db7c95659-6vg2h" podStartSLOduration=3.37151742 podStartE2EDuration="16.394400671s" podCreationTimestamp="2026-02-02 17:37:39 +0000 UTC" firstStartedPulling="2026-02-02 17:37:40.858126621 +0000 UTC m=+2852.479730701" lastFinishedPulling="2026-02-02 
17:37:53.881009862 +0000 UTC m=+2865.502613952" observedRunningTime="2026-02-02 17:37:55.391812447 +0000 UTC m=+2867.013416537" watchObservedRunningTime="2026-02-02 17:37:55.394400671 +0000 UTC m=+2867.016004741" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.471030 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7787d795cf-xvfgz" podStartSLOduration=3.41767822 podStartE2EDuration="16.470988974s" podCreationTimestamp="2026-02-02 17:37:39 +0000 UTC" firstStartedPulling="2026-02-02 17:37:40.749560911 +0000 UTC m=+2852.371164991" lastFinishedPulling="2026-02-02 17:37:53.802871625 +0000 UTC m=+2865.424475745" observedRunningTime="2026-02-02 17:37:55.453762255 +0000 UTC m=+2867.075366345" watchObservedRunningTime="2026-02-02 17:37:55.470988974 +0000 UTC m=+2867.092593054" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.474741 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-56ddff97fb-66qgb" podStartSLOduration=2.761139012 podStartE2EDuration="13.47472206s" podCreationTimestamp="2026-02-02 17:37:42 +0000 UTC" firstStartedPulling="2026-02-02 17:37:43.094316229 +0000 UTC m=+2854.715920309" lastFinishedPulling="2026-02-02 17:37:53.807899267 +0000 UTC m=+2865.429503357" observedRunningTime="2026-02-02 17:37:55.432822291 +0000 UTC m=+2867.054426371" watchObservedRunningTime="2026-02-02 17:37:55.47472206 +0000 UTC m=+2867.096326140" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.492877 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.492926 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.539830 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 17:37:55 crc kubenswrapper[4835]: I0202 17:37:55.568637 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 17:37:56 crc kubenswrapper[4835]: I0202 17:37:56.383031 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 17:37:56 crc kubenswrapper[4835]: I0202 17:37:56.383334 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 17:37:57 crc kubenswrapper[4835]: I0202 17:37:57.387803 4835 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 17:37:57 crc kubenswrapper[4835]: I0202 17:37:57.387832 4835 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 17:37:57 crc kubenswrapper[4835]: I0202 17:37:57.679213 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:57 crc kubenswrapper[4835]: I0202 17:37:57.693294 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 17:37:58 crc kubenswrapper[4835]: I0202 17:37:58.395818 4835 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 17:37:58 crc kubenswrapper[4835]: I0202 17:37:58.395845 4835 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 17:37:59 crc kubenswrapper[4835]: I0202 17:37:59.152792 4835 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 17:37:59 crc kubenswrapper[4835]: I0202 17:37:59.216906 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 17:37:59 crc kubenswrapper[4835]: I0202 17:37:59.946236 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:38:00 crc kubenswrapper[4835]: I0202 17:38:00.060797 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:38:02 crc kubenswrapper[4835]: I0202 17:38:02.437188 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-xtzts" event={"ID":"1664af25-6941-454e-9a16-2f27b62d4433","Type":"ContainerStarted","Data":"6267e780bd0e439f5147ae3b2e856c30f257d95165f74803197f87a621cfa863"} Feb 02 17:38:02 crc kubenswrapper[4835]: I0202 17:38:02.479795 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-xtzts" podStartSLOduration=6.790629853 podStartE2EDuration="13.479766455s" podCreationTimestamp="2026-02-02 17:37:49 +0000 UTC" firstStartedPulling="2026-02-02 17:37:54.413556862 +0000 UTC m=+2866.035160942" lastFinishedPulling="2026-02-02 17:38:01.102693464 +0000 UTC m=+2872.724297544" observedRunningTime="2026-02-02 17:38:02.460125738 +0000 UTC m=+2874.081729828" watchObservedRunningTime="2026-02-02 17:38:02.479766455 +0000 UTC m=+2874.101370575" Feb 02 17:38:02 crc kubenswrapper[4835]: I0202 17:38:02.694532 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:38:02 crc kubenswrapper[4835]: I0202 17:38:02.694610 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:38:03 crc kubenswrapper[4835]: I0202 17:38:03.004261 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:38:03 crc kubenswrapper[4835]: I0202 17:38:03.004332 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:38:12 crc kubenswrapper[4835]: I0202 17:38:12.547144 4835 generic.go:334] "Generic (PLEG): container finished" podID="1664af25-6941-454e-9a16-2f27b62d4433" containerID="6267e780bd0e439f5147ae3b2e856c30f257d95165f74803197f87a621cfa863" exitCode=0 Feb 02 17:38:12 crc kubenswrapper[4835]: I0202 17:38:12.547228 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-xtzts" event={"ID":"1664af25-6941-454e-9a16-2f27b62d4433","Type":"ContainerDied","Data":"6267e780bd0e439f5147ae3b2e856c30f257d95165f74803197f87a621cfa863"} Feb 02 17:38:12 crc kubenswrapper[4835]: I0202 17:38:12.696232 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-56ddff97fb-66qgb" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.248:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.248:8443: connect: connection refused" Feb 02 17:38:13 crc kubenswrapper[4835]: I0202 17:38:13.005840 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5f575cdbb6-2fppg" podUID="fec30fb3-23dc-4443-a90f-4fb8defb3a1f" containerName="horizon" probeResult="failure" output="Get 
\"https://10.217.0.249:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.249:8443: connect: connection refused" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.002048 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-xtzts" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.133107 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-config-data\") pod \"1664af25-6941-454e-9a16-2f27b62d4433\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.133506 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-job-config-data\") pod \"1664af25-6941-454e-9a16-2f27b62d4433\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.133633 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-combined-ca-bundle\") pod \"1664af25-6941-454e-9a16-2f27b62d4433\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.133725 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgxgl\" (UniqueName: \"kubernetes.io/projected/1664af25-6941-454e-9a16-2f27b62d4433-kube-api-access-cgxgl\") pod \"1664af25-6941-454e-9a16-2f27b62d4433\" (UID: \"1664af25-6941-454e-9a16-2f27b62d4433\") " Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.140257 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1664af25-6941-454e-9a16-2f27b62d4433-kube-api-access-cgxgl" (OuterVolumeSpecName: "kube-api-access-cgxgl") pod "1664af25-6941-454e-9a16-2f27b62d4433" (UID: "1664af25-6941-454e-9a16-2f27b62d4433"). InnerVolumeSpecName "kube-api-access-cgxgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.142536 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "1664af25-6941-454e-9a16-2f27b62d4433" (UID: "1664af25-6941-454e-9a16-2f27b62d4433"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.153432 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-config-data" (OuterVolumeSpecName: "config-data") pod "1664af25-6941-454e-9a16-2f27b62d4433" (UID: "1664af25-6941-454e-9a16-2f27b62d4433"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.188388 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1664af25-6941-454e-9a16-2f27b62d4433" (UID: "1664af25-6941-454e-9a16-2f27b62d4433"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.235745 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.235787 4835 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-job-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.235801 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1664af25-6941-454e-9a16-2f27b62d4433-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.235813 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgxgl\" (UniqueName: \"kubernetes.io/projected/1664af25-6941-454e-9a16-2f27b62d4433-kube-api-access-cgxgl\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.586191 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-xtzts" event={"ID":"1664af25-6941-454e-9a16-2f27b62d4433","Type":"ContainerDied","Data":"ad4808761a81908c3ca2f9ded88de9065c42e111e12595fe6f05fba435f53803"} Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.586237 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad4808761a81908c3ca2f9ded88de9065c42e111e12595fe6f05fba435f53803" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.586317 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-xtzts" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.870374 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.870713 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.931085 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:14 crc kubenswrapper[4835]: E0202 17:38:14.931554 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1664af25-6941-454e-9a16-2f27b62d4433" containerName="manila-db-sync" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.931571 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1664af25-6941-454e-9a16-2f27b62d4433" containerName="manila-db-sync" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.931770 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="1664af25-6941-454e-9a16-2f27b62d4433" containerName="manila-db-sync" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.932740 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.936548 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.936592 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.936779 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.936983 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-b4mbg" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.957371 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.959326 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.965648 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.973802 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:14 crc kubenswrapper[4835]: I0202 17:38:14.993243 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.052479 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.053243 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-scripts\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.053583 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.053793 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89gdr\" (UniqueName: \"kubernetes.io/projected/0e892d57-f496-42fa-94fd-d8d458fa61cc-kube-api-access-89gdr\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.053917 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-scripts\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.054114 4835 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.054255 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.054408 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.054537 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-ceph\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.054649 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e892d57-f496-42fa-94fd-d8d458fa61cc-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.054765 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.054899 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.055014 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwxdg\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-kube-api-access-fwxdg\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.055143 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.064935 4835 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/dnsmasq-dns-69655fd4bf-zddfv"] Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.067184 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.085212 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69655fd4bf-zddfv"] Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157318 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157369 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157392 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwxdg\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-kube-api-access-fwxdg\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157419 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157467 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157496 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-scripts\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157567 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157608 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89gdr\" (UniqueName: \"kubernetes.io/projected/0e892d57-f496-42fa-94fd-d8d458fa61cc-kube-api-access-89gdr\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157625 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-scripts\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157652 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157669 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157686 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157703 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-ceph\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157719 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e892d57-f496-42fa-94fd-d8d458fa61cc-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157797 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e892d57-f496-42fa-94fd-d8d458fa61cc-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.157937 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.164322 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.168016 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.170516 
4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.172491 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-scripts\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.174758 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.175193 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-ceph\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.175484 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.180815 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.184772 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.191025 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwxdg\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-kube-api-access-fwxdg\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.197796 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89gdr\" (UniqueName: \"kubernetes.io/projected/0e892d57-f496-42fa-94fd-d8d458fa61cc-kube-api-access-89gdr\") pod \"manila-scheduler-0\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.197817 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-scripts\") pod \"manila-share-share1-0\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " 
pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.251343 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.256347 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.259568 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-dns-svc\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.259605 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-ovsdbserver-sb\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.259634 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-ovsdbserver-nb\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.259679 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpznv\" (UniqueName: \"kubernetes.io/projected/12c4e956-4456-4f8e-b802-1db95f550d51-kube-api-access-cpznv\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.259699 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-openstack-edpm-ipam\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.259745 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-config\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.260002 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.260829 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.284896 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.288487 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.362641 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwcvc\" (UniqueName: \"kubernetes.io/projected/7b20186c-1dfc-4278-a45a-ac2b4b168765-kube-api-access-bwcvc\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.363122 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b20186c-1dfc-4278-a45a-ac2b4b168765-etc-machine-id\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.363240 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data-custom\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.363298 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-dns-svc\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.363326 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-ovsdbserver-sb\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364356 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-ovsdbserver-sb\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364397 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-ovsdbserver-nb\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364425 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-scripts\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364477 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpznv\" (UniqueName: \"kubernetes.io/projected/12c4e956-4456-4f8e-b802-1db95f550d51-kube-api-access-cpznv\") pod 
\"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364495 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364523 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-openstack-edpm-ipam\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364562 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-dns-svc\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364576 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b20186c-1dfc-4278-a45a-ac2b4b168765-logs\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364646 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-config\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.364668 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.365063 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-ovsdbserver-nb\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.365406 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-config\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.365814 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/12c4e956-4456-4f8e-b802-1db95f550d51-openstack-edpm-ipam\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 
17:38:15.398428 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpznv\" (UniqueName: \"kubernetes.io/projected/12c4e956-4456-4f8e-b802-1db95f550d51-kube-api-access-cpznv\") pod \"dnsmasq-dns-69655fd4bf-zddfv\" (UID: \"12c4e956-4456-4f8e-b802-1db95f550d51\") " pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.398900 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.466386 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data-custom\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.466660 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-scripts\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.466707 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.466751 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b20186c-1dfc-4278-a45a-ac2b4b168765-logs\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.466773 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.466794 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwcvc\" (UniqueName: \"kubernetes.io/projected/7b20186c-1dfc-4278-a45a-ac2b4b168765-kube-api-access-bwcvc\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.466821 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b20186c-1dfc-4278-a45a-ac2b4b168765-etc-machine-id\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.466983 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b20186c-1dfc-4278-a45a-ac2b4b168765-etc-machine-id\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.467974 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/7b20186c-1dfc-4278-a45a-ac2b4b168765-logs\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.482567 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.487743 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwcvc\" (UniqueName: \"kubernetes.io/projected/7b20186c-1dfc-4278-a45a-ac2b4b168765-kube-api-access-bwcvc\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.488523 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-scripts\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.489261 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.511689 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data-custom\") pod \"manila-api-0\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.746223 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Feb 02 17:38:15 crc kubenswrapper[4835]: I0202 17:38:15.949427 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:15 crc kubenswrapper[4835]: W0202 17:38:15.964472 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e892d57_f496_42fa_94fd_d8d458fa61cc.slice/crio-50d69ba1cf4d74fddd5af532f49f1d0dc759c0a12d78b9421ba2eecddca62490 WatchSource:0}: Error finding container 50d69ba1cf4d74fddd5af532f49f1d0dc759c0a12d78b9421ba2eecddca62490: Status 404 returned error can't find the container with id 50d69ba1cf4d74fddd5af532f49f1d0dc759c0a12d78b9421ba2eecddca62490 Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.032539 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69655fd4bf-zddfv"] Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.141760 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.397983 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.612158 4835 generic.go:334] "Generic (PLEG): container finished" podID="12c4e956-4456-4f8e-b802-1db95f550d51" containerID="aad809e5fd21b8cb4007f7279056be4c65d4d3686c7933b1d5728aa3c9dbc809" exitCode=0 Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.612261 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" event={"ID":"12c4e956-4456-4f8e-b802-1db95f550d51","Type":"ContainerDied","Data":"aad809e5fd21b8cb4007f7279056be4c65d4d3686c7933b1d5728aa3c9dbc809"} Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.612342 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" event={"ID":"12c4e956-4456-4f8e-b802-1db95f550d51","Type":"ContainerStarted","Data":"c012ce8862e3f6fb39dde8397dfe1764cce71c9876d04efac6a3fb4bcffa7358"} Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.617968 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7b20186c-1dfc-4278-a45a-ac2b4b168765","Type":"ContainerStarted","Data":"39d2d224493811be02fd56a51a1453e969c080dfe72070294ed69d2723fc04ab"} Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.619453 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3118cb6d-c193-41a4-8edd-444205a0020e","Type":"ContainerStarted","Data":"a8057768d028c90abab5a16661d0ca9a034a1d26e643704901bb78f207e82854"} Feb 02 17:38:16 crc kubenswrapper[4835]: I0202 17:38:16.627692 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"0e892d57-f496-42fa-94fd-d8d458fa61cc","Type":"ContainerStarted","Data":"50d69ba1cf4d74fddd5af532f49f1d0dc759c0a12d78b9421ba2eecddca62490"} Feb 02 17:38:17 crc kubenswrapper[4835]: I0202 17:38:17.642981 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7b20186c-1dfc-4278-a45a-ac2b4b168765","Type":"ContainerStarted","Data":"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d"} Feb 02 17:38:17 crc kubenswrapper[4835]: I0202 17:38:17.646424 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" 
event={"ID":"12c4e956-4456-4f8e-b802-1db95f550d51","Type":"ContainerStarted","Data":"114125bda2c2b4bf2c188323e5ec4d6eb282bfa6bc9149ade08ee61029524adb"} Feb 02 17:38:17 crc kubenswrapper[4835]: I0202 17:38:17.647297 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:17 crc kubenswrapper[4835]: I0202 17:38:17.676114 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" podStartSLOduration=2.676094934 podStartE2EDuration="2.676094934s" podCreationTimestamp="2026-02-02 17:38:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:38:17.667117479 +0000 UTC m=+2889.288721569" watchObservedRunningTime="2026-02-02 17:38:17.676094934 +0000 UTC m=+2889.297699014" Feb 02 17:38:18 crc kubenswrapper[4835]: I0202 17:38:18.211112 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:18 crc kubenswrapper[4835]: I0202 17:38:18.659885 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"0e892d57-f496-42fa-94fd-d8d458fa61cc","Type":"ContainerStarted","Data":"86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1"} Feb 02 17:38:18 crc kubenswrapper[4835]: I0202 17:38:18.659927 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"0e892d57-f496-42fa-94fd-d8d458fa61cc","Type":"ContainerStarted","Data":"4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d"} Feb 02 17:38:18 crc kubenswrapper[4835]: I0202 17:38:18.667046 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7b20186c-1dfc-4278-a45a-ac2b4b168765","Type":"ContainerStarted","Data":"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02"} Feb 02 17:38:18 crc kubenswrapper[4835]: I0202 17:38:18.687835 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.7585015630000003 podStartE2EDuration="4.68781571s" podCreationTimestamp="2026-02-02 17:38:14 +0000 UTC" firstStartedPulling="2026-02-02 17:38:15.978488988 +0000 UTC m=+2887.600093068" lastFinishedPulling="2026-02-02 17:38:16.907803135 +0000 UTC m=+2888.529407215" observedRunningTime="2026-02-02 17:38:18.678041873 +0000 UTC m=+2890.299645953" watchObservedRunningTime="2026-02-02 17:38:18.68781571 +0000 UTC m=+2890.309419790" Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.249098 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=4.249074725 podStartE2EDuration="4.249074725s" podCreationTimestamp="2026-02-02 17:38:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:38:18.717827982 +0000 UTC m=+2890.339432072" watchObservedRunningTime="2026-02-02 17:38:19.249074725 +0000 UTC m=+2890.870678795" Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.675872 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerName="manila-api-log" containerID="cri-o://ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d" gracePeriod=30 Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.676444 4835 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerName="manila-api" containerID="cri-o://c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02" gracePeriod=30 Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.676790 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.809872 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.810173 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="ceilometer-central-agent" containerID="cri-o://c72ca7ce364d4eb3149609bd6bfee1475b893e8d462415f7bb96626f8441952e" gracePeriod=30 Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.810220 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="ceilometer-notification-agent" containerID="cri-o://3daad7985b2b993065441bb8348f7de7403d33ae16d6c1b6b71129c87b3f5105" gracePeriod=30 Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.810372 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="sg-core" containerID="cri-o://645e2f4c25377d72277822fc61cb2669bfd3fad0f5444a02d456f99875ee0f4f" gracePeriod=30 Feb 02 17:38:19 crc kubenswrapper[4835]: I0202 17:38:19.810189 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="proxy-httpd" containerID="cri-o://30d59ac2b3a641c423176b98f72e51d9580fd2dbf22150c021d02826e8fee721" gracePeriod=30 Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.300860 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.428039 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-scripts\") pod \"7b20186c-1dfc-4278-a45a-ac2b4b168765\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.428173 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data\") pod \"7b20186c-1dfc-4278-a45a-ac2b4b168765\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.428232 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwcvc\" (UniqueName: \"kubernetes.io/projected/7b20186c-1dfc-4278-a45a-ac2b4b168765-kube-api-access-bwcvc\") pod \"7b20186c-1dfc-4278-a45a-ac2b4b168765\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.428342 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b20186c-1dfc-4278-a45a-ac2b4b168765-logs\") pod \"7b20186c-1dfc-4278-a45a-ac2b4b168765\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.428399 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data-custom\") pod \"7b20186c-1dfc-4278-a45a-ac2b4b168765\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.428423 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-combined-ca-bundle\") pod \"7b20186c-1dfc-4278-a45a-ac2b4b168765\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.428467 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b20186c-1dfc-4278-a45a-ac2b4b168765-etc-machine-id\") pod \"7b20186c-1dfc-4278-a45a-ac2b4b168765\" (UID: \"7b20186c-1dfc-4278-a45a-ac2b4b168765\") " Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.428896 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7b20186c-1dfc-4278-a45a-ac2b4b168765-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7b20186c-1dfc-4278-a45a-ac2b4b168765" (UID: "7b20186c-1dfc-4278-a45a-ac2b4b168765"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.432198 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b20186c-1dfc-4278-a45a-ac2b4b168765-logs" (OuterVolumeSpecName: "logs") pod "7b20186c-1dfc-4278-a45a-ac2b4b168765" (UID: "7b20186c-1dfc-4278-a45a-ac2b4b168765"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.433920 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-scripts" (OuterVolumeSpecName: "scripts") pod "7b20186c-1dfc-4278-a45a-ac2b4b168765" (UID: "7b20186c-1dfc-4278-a45a-ac2b4b168765"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.434441 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b20186c-1dfc-4278-a45a-ac2b4b168765-kube-api-access-bwcvc" (OuterVolumeSpecName: "kube-api-access-bwcvc") pod "7b20186c-1dfc-4278-a45a-ac2b4b168765" (UID: "7b20186c-1dfc-4278-a45a-ac2b4b168765"). InnerVolumeSpecName "kube-api-access-bwcvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.435326 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7b20186c-1dfc-4278-a45a-ac2b4b168765" (UID: "7b20186c-1dfc-4278-a45a-ac2b4b168765"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.470308 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b20186c-1dfc-4278-a45a-ac2b4b168765" (UID: "7b20186c-1dfc-4278-a45a-ac2b4b168765"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.489886 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data" (OuterVolumeSpecName: "config-data") pod "7b20186c-1dfc-4278-a45a-ac2b4b168765" (UID: "7b20186c-1dfc-4278-a45a-ac2b4b168765"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.531055 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwcvc\" (UniqueName: \"kubernetes.io/projected/7b20186c-1dfc-4278-a45a-ac2b4b168765-kube-api-access-bwcvc\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.531099 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b20186c-1dfc-4278-a45a-ac2b4b168765-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.531112 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.531125 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.531135 4835 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b20186c-1dfc-4278-a45a-ac2b4b168765-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.531144 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.531154 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b20186c-1dfc-4278-a45a-ac2b4b168765-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.685883 4835 generic.go:334] "Generic (PLEG): container finished" podID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerID="30d59ac2b3a641c423176b98f72e51d9580fd2dbf22150c021d02826e8fee721" exitCode=0 Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.685911 4835 generic.go:334] "Generic (PLEG): container finished" podID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerID="645e2f4c25377d72277822fc61cb2669bfd3fad0f5444a02d456f99875ee0f4f" exitCode=2 Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.685921 4835 generic.go:334] "Generic (PLEG): container finished" podID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerID="c72ca7ce364d4eb3149609bd6bfee1475b893e8d462415f7bb96626f8441952e" exitCode=0 Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.685978 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerDied","Data":"30d59ac2b3a641c423176b98f72e51d9580fd2dbf22150c021d02826e8fee721"} Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.686036 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerDied","Data":"645e2f4c25377d72277822fc61cb2669bfd3fad0f5444a02d456f99875ee0f4f"} Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.686062 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerDied","Data":"c72ca7ce364d4eb3149609bd6bfee1475b893e8d462415f7bb96626f8441952e"} Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.689140 4835 generic.go:334] "Generic (PLEG): container finished" podID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerID="c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02" exitCode=0 Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.689165 4835 generic.go:334] "Generic (PLEG): container finished" podID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerID="ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d" exitCode=143 Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.689188 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7b20186c-1dfc-4278-a45a-ac2b4b168765","Type":"ContainerDied","Data":"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02"} Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.689217 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7b20186c-1dfc-4278-a45a-ac2b4b168765","Type":"ContainerDied","Data":"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d"} Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.689217 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.689237 4835 scope.go:117] "RemoveContainer" containerID="c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.689226 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7b20186c-1dfc-4278-a45a-ac2b4b168765","Type":"ContainerDied","Data":"39d2d224493811be02fd56a51a1453e969c080dfe72070294ed69d2723fc04ab"} Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.730572 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.749885 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.762443 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:20 crc kubenswrapper[4835]: E0202 17:38:20.763015 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerName="manila-api-log" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.763043 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerName="manila-api-log" Feb 02 17:38:20 crc kubenswrapper[4835]: E0202 17:38:20.763078 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerName="manila-api" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.763087 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerName="manila-api" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.763351 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerName="manila-api-log" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.763385 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" containerName="manila-api" Feb 02 17:38:20 crc 
kubenswrapper[4835]: I0202 17:38:20.764787 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.771161 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.771648 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.771886 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.771969 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.952514 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9phbn\" (UniqueName: \"kubernetes.io/projected/3a4997a1-3860-46d1-ba9f-a81c6800aec9-kube-api-access-9phbn\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.952579 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-config-data\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.952656 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a4997a1-3860-46d1-ba9f-a81c6800aec9-logs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.952697 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-internal-tls-certs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.952739 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.953991 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a4997a1-3860-46d1-ba9f-a81c6800aec9-etc-machine-id\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.954052 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-public-tls-certs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.954115 4835 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-config-data-custom\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:20 crc kubenswrapper[4835]: I0202 17:38:20.954164 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-scripts\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056265 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-internal-tls-certs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056330 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056381 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a4997a1-3860-46d1-ba9f-a81c6800aec9-etc-machine-id\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056423 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-public-tls-certs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056447 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-config-data-custom\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056469 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-scripts\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056567 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9phbn\" (UniqueName: \"kubernetes.io/projected/3a4997a1-3860-46d1-ba9f-a81c6800aec9-kube-api-access-9phbn\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056592 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-config-data\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 
17:38:21.056615 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a4997a1-3860-46d1-ba9f-a81c6800aec9-logs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.056997 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a4997a1-3860-46d1-ba9f-a81c6800aec9-logs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.057700 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3a4997a1-3860-46d1-ba9f-a81c6800aec9-etc-machine-id\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.062162 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-config-data\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.063668 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.068109 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-config-data-custom\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.068956 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-scripts\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.070676 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-public-tls-certs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.070767 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a4997a1-3860-46d1-ba9f-a81c6800aec9-internal-tls-certs\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.074812 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9phbn\" (UniqueName: \"kubernetes.io/projected/3a4997a1-3860-46d1-ba9f-a81c6800aec9-kube-api-access-9phbn\") pod \"manila-api-0\" (UID: \"3a4997a1-3860-46d1-ba9f-a81c6800aec9\") " pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.156251 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Feb 02 17:38:21 crc kubenswrapper[4835]: I0202 17:38:21.202759 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b20186c-1dfc-4278-a45a-ac2b4b168765" path="/var/lib/kubelet/pods/7b20186c-1dfc-4278-a45a-ac2b4b168765/volumes" Feb 02 17:38:22 crc kubenswrapper[4835]: I0202 17:38:22.718543 4835 generic.go:334] "Generic (PLEG): container finished" podID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerID="3daad7985b2b993065441bb8348f7de7403d33ae16d6c1b6b71129c87b3f5105" exitCode=0 Feb 02 17:38:22 crc kubenswrapper[4835]: I0202 17:38:22.718879 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerDied","Data":"3daad7985b2b993065441bb8348f7de7403d33ae16d6c1b6b71129c87b3f5105"} Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.164613 4835 scope.go:117] "RemoveContainer" containerID="ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.542362 4835 scope.go:117] "RemoveContainer" containerID="c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02" Feb 02 17:38:23 crc kubenswrapper[4835]: E0202 17:38:23.545049 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02\": container with ID starting with c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02 not found: ID does not exist" containerID="c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.545127 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02"} err="failed to get container status \"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02\": rpc error: code = NotFound desc = could not find container \"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02\": container with ID starting with c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02 not found: ID does not exist" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.545180 4835 scope.go:117] "RemoveContainer" containerID="ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d" Feb 02 17:38:23 crc kubenswrapper[4835]: E0202 17:38:23.545649 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d\": container with ID starting with ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d not found: ID does not exist" containerID="ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.545706 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d"} err="failed to get container status \"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d\": rpc error: code = NotFound desc = could not find container \"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d\": container with ID starting with ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d not found: ID does not exist" Feb 02 17:38:23 crc kubenswrapper[4835]: 
I0202 17:38:23.545727 4835 scope.go:117] "RemoveContainer" containerID="c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.545986 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02"} err="failed to get container status \"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02\": rpc error: code = NotFound desc = could not find container \"c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02\": container with ID starting with c26dac13a28186243f7e59debd32fe438ce8f6a9e95dbe63c43beabbf4a83f02 not found: ID does not exist" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.546032 4835 scope.go:117] "RemoveContainer" containerID="ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.546452 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d"} err="failed to get container status \"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d\": rpc error: code = NotFound desc = could not find container \"ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d\": container with ID starting with ab2b800d8d05f3e29b9c5f0ae5ff43d48b7ad446b38033fa4798fc4c0f5abb1d not found: ID does not exist" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.658436 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.731291 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ef884a8-061e-4f93-b4c9-9149a5f10f9f","Type":"ContainerDied","Data":"56c3fc9a64700931866c158be2d0c0153dd5c4e2ae380270634a8562ba56a722"} Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.731345 4835 scope.go:117] "RemoveContainer" containerID="30d59ac2b3a641c423176b98f72e51d9580fd2dbf22150c021d02826e8fee721" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.731496 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.811456 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-run-httpd\") pod \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.811514 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-sg-core-conf-yaml\") pod \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.811590 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-config-data\") pod \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.811635 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-ceilometer-tls-certs\") pod \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.811689 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2tdv\" (UniqueName: \"kubernetes.io/projected/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-kube-api-access-v2tdv\") pod \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.811717 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-log-httpd\") pod \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.811763 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-scripts\") pod \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.811829 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-combined-ca-bundle\") pod \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\" (UID: \"5ef884a8-061e-4f93-b4c9-9149a5f10f9f\") " Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.812343 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5ef884a8-061e-4f93-b4c9-9149a5f10f9f" (UID: "5ef884a8-061e-4f93-b4c9-9149a5f10f9f"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.812812 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5ef884a8-061e-4f93-b4c9-9149a5f10f9f" (UID: "5ef884a8-061e-4f93-b4c9-9149a5f10f9f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.818994 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-scripts" (OuterVolumeSpecName: "scripts") pod "5ef884a8-061e-4f93-b4c9-9149a5f10f9f" (UID: "5ef884a8-061e-4f93-b4c9-9149a5f10f9f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.832724 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-kube-api-access-v2tdv" (OuterVolumeSpecName: "kube-api-access-v2tdv") pod "5ef884a8-061e-4f93-b4c9-9149a5f10f9f" (UID: "5ef884a8-061e-4f93-b4c9-9149a5f10f9f"). InnerVolumeSpecName "kube-api-access-v2tdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.852551 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5ef884a8-061e-4f93-b4c9-9149a5f10f9f" (UID: "5ef884a8-061e-4f93-b4c9-9149a5f10f9f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.888840 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "5ef884a8-061e-4f93-b4c9-9149a5f10f9f" (UID: "5ef884a8-061e-4f93-b4c9-9149a5f10f9f"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.899789 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ef884a8-061e-4f93-b4c9-9149a5f10f9f" (UID: "5ef884a8-061e-4f93-b4c9-9149a5f10f9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.907388 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-config-data" (OuterVolumeSpecName: "config-data") pod "5ef884a8-061e-4f93-b4c9-9149a5f10f9f" (UID: "5ef884a8-061e-4f93-b4c9-9149a5f10f9f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.913745 4835 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.913777 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2tdv\" (UniqueName: \"kubernetes.io/projected/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-kube-api-access-v2tdv\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.913791 4835 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.913799 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.913809 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.913818 4835 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.913826 4835 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:23 crc kubenswrapper[4835]: I0202 17:38:23.913834 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef884a8-061e-4f93-b4c9-9149a5f10f9f-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.076432 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.085831 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097133 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:24 crc kubenswrapper[4835]: E0202 17:38:24.097543 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="proxy-httpd" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097560 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="proxy-httpd" Feb 02 17:38:24 crc kubenswrapper[4835]: E0202 17:38:24.097581 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="ceilometer-central-agent" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097587 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="ceilometer-central-agent" Feb 02 17:38:24 crc kubenswrapper[4835]: E0202 17:38:24.097601 4835 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="ceilometer-notification-agent" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097607 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="ceilometer-notification-agent" Feb 02 17:38:24 crc kubenswrapper[4835]: E0202 17:38:24.097616 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="sg-core" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097621 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="sg-core" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097792 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="proxy-httpd" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097804 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="sg-core" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097817 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="ceilometer-notification-agent" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.097826 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" containerName="ceilometer-central-agent" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.099558 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.104827 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.104833 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.104936 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.119385 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-run-httpd\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.119675 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.119807 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.119919 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr7kk\" (UniqueName: 
\"kubernetes.io/projected/448f7464-6641-44af-91cf-4ecdda729acf-kube-api-access-rr7kk\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.120054 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-log-httpd\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.120184 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-config-data\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.120335 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.120474 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-scripts\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.131552 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.221946 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr7kk\" (UniqueName: \"kubernetes.io/projected/448f7464-6641-44af-91cf-4ecdda729acf-kube-api-access-rr7kk\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.222194 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-log-httpd\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.222322 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-config-data\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.222425 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.222503 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-scripts\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " 
pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.222611 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-run-httpd\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.222688 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.222867 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.223387 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-log-httpd\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.223753 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-run-httpd\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.227875 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-scripts\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.227917 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.228301 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.241054 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-config-data\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.241950 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc 
kubenswrapper[4835]: I0202 17:38:24.245129 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr7kk\" (UniqueName: \"kubernetes.io/projected/448f7464-6641-44af-91cf-4ecdda729acf-kube-api-access-rr7kk\") pod \"ceilometer-0\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.422665 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.614901 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:38:24 crc kubenswrapper[4835]: I0202 17:38:24.807751 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.202822 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ef884a8-061e-4f93-b4c9-9149a5f10f9f" path="/var/lib/kubelet/pods/5ef884a8-061e-4f93-b4c9-9149a5f10f9f/volumes" Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.260679 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.407579 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69655fd4bf-zddfv" Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.451094 4835 scope.go:117] "RemoveContainer" containerID="645e2f4c25377d72277822fc61cb2669bfd3fad0f5444a02d456f99875ee0f4f" Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.531878 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-hwd6p"] Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.532440 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" podUID="38d5997d-17a2-4379-bf47-ff2ef2705e77" containerName="dnsmasq-dns" containerID="cri-o://955fdfdfd066e707901defaf326fb2def2d45f6b6c33529f1b8b5edc5cf0a938" gracePeriod=10 Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.762927 4835 generic.go:334] "Generic (PLEG): container finished" podID="86972842-42a5-46ce-b163-62b8f57571d3" containerID="0a25ec9dab326d88c4484560939e7ed123471fdb14773ad4d7a63c8d1d199fe9" exitCode=137 Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.763237 4835 generic.go:334] "Generic (PLEG): container finished" podID="86972842-42a5-46ce-b163-62b8f57571d3" containerID="b14a9001a4a0e9a0883b318471220f2b7d9a6ece766954c7255dc3cc693e6bec" exitCode=137 Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.763129 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db7c95659-6vg2h" event={"ID":"86972842-42a5-46ce-b163-62b8f57571d3","Type":"ContainerDied","Data":"0a25ec9dab326d88c4484560939e7ed123471fdb14773ad4d7a63c8d1d199fe9"} Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.763343 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db7c95659-6vg2h" event={"ID":"86972842-42a5-46ce-b163-62b8f57571d3","Type":"ContainerDied","Data":"b14a9001a4a0e9a0883b318471220f2b7d9a6ece766954c7255dc3cc693e6bec"} Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.775667 4835 generic.go:334] "Generic (PLEG): container finished" podID="38d5997d-17a2-4379-bf47-ff2ef2705e77" containerID="955fdfdfd066e707901defaf326fb2def2d45f6b6c33529f1b8b5edc5cf0a938" 
exitCode=0 Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.775738 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" event={"ID":"38d5997d-17a2-4379-bf47-ff2ef2705e77","Type":"ContainerDied","Data":"955fdfdfd066e707901defaf326fb2def2d45f6b6c33529f1b8b5edc5cf0a938"} Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.779723 4835 generic.go:334] "Generic (PLEG): container finished" podID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerID="c5261cacd77fb17ee57d4cd4cc8eb2afcf9f859db0355ec1da50e2bf72223f12" exitCode=137 Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.779760 4835 generic.go:334] "Generic (PLEG): container finished" podID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerID="3d32bc87b05b384ce9e37217abf2604df63c4e3ae386fefadae1670b8fcd0905" exitCode=137 Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.779778 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7787d795cf-xvfgz" event={"ID":"b89e0428-ff08-413f-aad7-6686319cf0fd","Type":"ContainerDied","Data":"c5261cacd77fb17ee57d4cd4cc8eb2afcf9f859db0355ec1da50e2bf72223f12"} Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.779834 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7787d795cf-xvfgz" event={"ID":"b89e0428-ff08-413f-aad7-6686319cf0fd","Type":"ContainerDied","Data":"3d32bc87b05b384ce9e37217abf2604df63c4e3ae386fefadae1670b8fcd0905"} Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.803926 4835 scope.go:117] "RemoveContainer" containerID="3daad7985b2b993065441bb8348f7de7403d33ae16d6c1b6b71129c87b3f5105" Feb 02 17:38:25 crc kubenswrapper[4835]: I0202 17:38:25.835290 4835 scope.go:117] "RemoveContainer" containerID="c72ca7ce364d4eb3149609bd6bfee1475b893e8d462415f7bb96626f8441952e" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.003009 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.107416 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.206405 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rj55q\" (UniqueName: \"kubernetes.io/projected/b89e0428-ff08-413f-aad7-6686319cf0fd-kube-api-access-rj55q\") pod \"b89e0428-ff08-413f-aad7-6686319cf0fd\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.207326 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-scripts\") pod \"b89e0428-ff08-413f-aad7-6686319cf0fd\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.207494 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b89e0428-ff08-413f-aad7-6686319cf0fd-logs\") pod \"b89e0428-ff08-413f-aad7-6686319cf0fd\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.207543 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b89e0428-ff08-413f-aad7-6686319cf0fd-horizon-secret-key\") pod \"b89e0428-ff08-413f-aad7-6686319cf0fd\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.207568 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-config-data\") pod \"b89e0428-ff08-413f-aad7-6686319cf0fd\" (UID: \"b89e0428-ff08-413f-aad7-6686319cf0fd\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.208235 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b89e0428-ff08-413f-aad7-6686319cf0fd-logs" (OuterVolumeSpecName: "logs") pod "b89e0428-ff08-413f-aad7-6686319cf0fd" (UID: "b89e0428-ff08-413f-aad7-6686319cf0fd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.214634 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b89e0428-ff08-413f-aad7-6686319cf0fd-kube-api-access-rj55q" (OuterVolumeSpecName: "kube-api-access-rj55q") pod "b89e0428-ff08-413f-aad7-6686319cf0fd" (UID: "b89e0428-ff08-413f-aad7-6686319cf0fd"). InnerVolumeSpecName "kube-api-access-rj55q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.214807 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b89e0428-ff08-413f-aad7-6686319cf0fd-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b89e0428-ff08-413f-aad7-6686319cf0fd" (UID: "b89e0428-ff08-413f-aad7-6686319cf0fd"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.232204 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-scripts" (OuterVolumeSpecName: "scripts") pod "b89e0428-ff08-413f-aad7-6686319cf0fd" (UID: "b89e0428-ff08-413f-aad7-6686319cf0fd"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.255434 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-config-data" (OuterVolumeSpecName: "config-data") pod "b89e0428-ff08-413f-aad7-6686319cf0fd" (UID: "b89e0428-ff08-413f-aad7-6686319cf0fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.268356 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.274592 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.319934 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-nb\") pod \"38d5997d-17a2-4379-bf47-ff2ef2705e77\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.319999 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlzrt\" (UniqueName: \"kubernetes.io/projected/38d5997d-17a2-4379-bf47-ff2ef2705e77-kube-api-access-jlzrt\") pod \"38d5997d-17a2-4379-bf47-ff2ef2705e77\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.320027 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-config\") pod \"38d5997d-17a2-4379-bf47-ff2ef2705e77\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.320159 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-sb\") pod \"38d5997d-17a2-4379-bf47-ff2ef2705e77\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.320195 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-openstack-edpm-ipam\") pod \"38d5997d-17a2-4379-bf47-ff2ef2705e77\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.320217 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-dns-svc\") pod \"38d5997d-17a2-4379-bf47-ff2ef2705e77\" (UID: \"38d5997d-17a2-4379-bf47-ff2ef2705e77\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.323974 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b89e0428-ff08-413f-aad7-6686319cf0fd-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.323990 4835 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b89e0428-ff08-413f-aad7-6686319cf0fd-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 
17:38:26.324001 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.324009 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rj55q\" (UniqueName: \"kubernetes.io/projected/b89e0428-ff08-413f-aad7-6686319cf0fd-kube-api-access-rj55q\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.324018 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b89e0428-ff08-413f-aad7-6686319cf0fd-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.331206 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38d5997d-17a2-4379-bf47-ff2ef2705e77-kube-api-access-jlzrt" (OuterVolumeSpecName: "kube-api-access-jlzrt") pod "38d5997d-17a2-4379-bf47-ff2ef2705e77" (UID: "38d5997d-17a2-4379-bf47-ff2ef2705e77"). InnerVolumeSpecName "kube-api-access-jlzrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.362774 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.388524 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "38d5997d-17a2-4379-bf47-ff2ef2705e77" (UID: "38d5997d-17a2-4379-bf47-ff2ef2705e77"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.391570 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-config" (OuterVolumeSpecName: "config") pod "38d5997d-17a2-4379-bf47-ff2ef2705e77" (UID: "38d5997d-17a2-4379-bf47-ff2ef2705e77"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.402876 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "38d5997d-17a2-4379-bf47-ff2ef2705e77" (UID: "38d5997d-17a2-4379-bf47-ff2ef2705e77"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.422579 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "38d5997d-17a2-4379-bf47-ff2ef2705e77" (UID: "38d5997d-17a2-4379-bf47-ff2ef2705e77"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.424936 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8zrg\" (UniqueName: \"kubernetes.io/projected/86972842-42a5-46ce-b163-62b8f57571d3-kube-api-access-q8zrg\") pod \"86972842-42a5-46ce-b163-62b8f57571d3\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425047 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86972842-42a5-46ce-b163-62b8f57571d3-logs\") pod \"86972842-42a5-46ce-b163-62b8f57571d3\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425131 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-config-data\") pod \"86972842-42a5-46ce-b163-62b8f57571d3\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425226 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86972842-42a5-46ce-b163-62b8f57571d3-horizon-secret-key\") pod \"86972842-42a5-46ce-b163-62b8f57571d3\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425376 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-scripts\") pod \"86972842-42a5-46ce-b163-62b8f57571d3\" (UID: \"86972842-42a5-46ce-b163-62b8f57571d3\") " Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425808 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlzrt\" (UniqueName: \"kubernetes.io/projected/38d5997d-17a2-4379-bf47-ff2ef2705e77-kube-api-access-jlzrt\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425822 4835 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-config\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425830 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425838 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.425846 4835 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.426898 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86972842-42a5-46ce-b163-62b8f57571d3-logs" (OuterVolumeSpecName: "logs") pod "86972842-42a5-46ce-b163-62b8f57571d3" (UID: "86972842-42a5-46ce-b163-62b8f57571d3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.430288 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86972842-42a5-46ce-b163-62b8f57571d3-kube-api-access-q8zrg" (OuterVolumeSpecName: "kube-api-access-q8zrg") pod "86972842-42a5-46ce-b163-62b8f57571d3" (UID: "86972842-42a5-46ce-b163-62b8f57571d3"). InnerVolumeSpecName "kube-api-access-q8zrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.437102 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86972842-42a5-46ce-b163-62b8f57571d3-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "86972842-42a5-46ce-b163-62b8f57571d3" (UID: "86972842-42a5-46ce-b163-62b8f57571d3"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.458122 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-scripts" (OuterVolumeSpecName: "scripts") pod "86972842-42a5-46ce-b163-62b8f57571d3" (UID: "86972842-42a5-46ce-b163-62b8f57571d3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.460572 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "38d5997d-17a2-4379-bf47-ff2ef2705e77" (UID: "38d5997d-17a2-4379-bf47-ff2ef2705e77"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.484716 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-config-data" (OuterVolumeSpecName: "config-data") pod "86972842-42a5-46ce-b163-62b8f57571d3" (UID: "86972842-42a5-46ce-b163-62b8f57571d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.529486 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86972842-42a5-46ce-b163-62b8f57571d3-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.529517 4835 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38d5997d-17a2-4379-bf47-ff2ef2705e77-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.529529 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.529537 4835 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/86972842-42a5-46ce-b163-62b8f57571d3-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.529545 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/86972842-42a5-46ce-b163-62b8f57571d3-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.529553 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8zrg\" (UniqueName: \"kubernetes.io/projected/86972842-42a5-46ce-b163-62b8f57571d3-kube-api-access-q8zrg\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.799885 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7787d795cf-xvfgz" event={"ID":"b89e0428-ff08-413f-aad7-6686319cf0fd","Type":"ContainerDied","Data":"2cbb5faed0f216b2ebd5ac55d622e186fa42db5efc09c16c93ea6ef67214e608"} Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.800204 4835 scope.go:117] "RemoveContainer" containerID="c5261cacd77fb17ee57d4cd4cc8eb2afcf9f859db0355ec1da50e2bf72223f12" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.799906 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7787d795cf-xvfgz" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.801999 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"3a4997a1-3860-46d1-ba9f-a81c6800aec9","Type":"ContainerStarted","Data":"dc4308e2ae57204d6e940255abc82849a41afc75a445a9f43cb50d66f5271e3c"} Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.802036 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"3a4997a1-3860-46d1-ba9f-a81c6800aec9","Type":"ContainerStarted","Data":"aa2381759536a4dc0489bb30885b56a91224babdd8a3f316ab3c524500ae4e3e"} Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.805096 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db7c95659-6vg2h" event={"ID":"86972842-42a5-46ce-b163-62b8f57571d3","Type":"ContainerDied","Data":"e2e3489ec76605f638987443a804f2f0851b3f6d170ef4e4fa3fdfc0796a9d77"} Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.805309 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7db7c95659-6vg2h" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.819173 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerStarted","Data":"3666b6eb5afeb7d429c7f97021f393d69ac5d09e56939efd8e53cf706e02010b"} Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.823462 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" event={"ID":"38d5997d-17a2-4379-bf47-ff2ef2705e77","Type":"ContainerDied","Data":"185be7cc9129ce62f1b698a346a5425d2ed776f32a26018009bce11ff9686e80"} Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.823683 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbc59fbb7-hwd6p" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.844675 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.846450 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3118cb6d-c193-41a4-8edd-444205a0020e","Type":"ContainerStarted","Data":"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2"} Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.855842 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7787d795cf-xvfgz"] Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.867242 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7787d795cf-xvfgz"] Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.874893 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7db7c95659-6vg2h"] Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.885928 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7db7c95659-6vg2h"] Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.907470 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-hwd6p"] Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.916893 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-hwd6p"] Feb 02 17:38:26 crc kubenswrapper[4835]: I0202 17:38:26.952167 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5f575cdbb6-2fppg" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.016084 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56ddff97fb-66qgb"] Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.133130 4835 scope.go:117] "RemoveContainer" containerID="3d32bc87b05b384ce9e37217abf2604df63c4e3ae386fefadae1670b8fcd0905" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.160474 4835 scope.go:117] "RemoveContainer" containerID="0a25ec9dab326d88c4484560939e7ed123471fdb14773ad4d7a63c8d1d199fe9" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.254950 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38d5997d-17a2-4379-bf47-ff2ef2705e77" path="/var/lib/kubelet/pods/38d5997d-17a2-4379-bf47-ff2ef2705e77/volumes" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.255844 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86972842-42a5-46ce-b163-62b8f57571d3" path="/var/lib/kubelet/pods/86972842-42a5-46ce-b163-62b8f57571d3/volumes" Feb 02 17:38:27 crc kubenswrapper[4835]: 
I0202 17:38:27.257403 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" path="/var/lib/kubelet/pods/b89e0428-ff08-413f-aad7-6686319cf0fd/volumes" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.375874 4835 scope.go:117] "RemoveContainer" containerID="b14a9001a4a0e9a0883b318471220f2b7d9a6ece766954c7255dc3cc693e6bec" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.482733 4835 scope.go:117] "RemoveContainer" containerID="955fdfdfd066e707901defaf326fb2def2d45f6b6c33529f1b8b5edc5cf0a938" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.565167 4835 scope.go:117] "RemoveContainer" containerID="759ffb18a3684d96dc3a839088c978da4f2f17a9a00c0bc11ab37f5c638145a2" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.856262 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerStarted","Data":"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82"} Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.859231 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3118cb6d-c193-41a4-8edd-444205a0020e","Type":"ContainerStarted","Data":"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98"} Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.864480 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"3a4997a1-3860-46d1-ba9f-a81c6800aec9","Type":"ContainerStarted","Data":"19aa058d63c0dc9237a821923da731c6ef71f80e52f53eb4a931a7b0abeadac0"} Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.864605 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.873168 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-56ddff97fb-66qgb" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon-log" containerID="cri-o://586adfcb28f1300cc5d1b9ca3414d9043ae073502e1c357412494828fd854505" gracePeriod=30 Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.873302 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-56ddff97fb-66qgb" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon" containerID="cri-o://b4a83f70f2990b82ef14ada3b958133f1bf6cc74c4248b3aaa4bf89501b57bef" gracePeriod=30 Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.892160 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=4.467675185 podStartE2EDuration="13.892140256s" podCreationTimestamp="2026-02-02 17:38:14 +0000 UTC" firstStartedPulling="2026-02-02 17:38:16.152716982 +0000 UTC m=+2887.774321062" lastFinishedPulling="2026-02-02 17:38:25.577182053 +0000 UTC m=+2897.198786133" observedRunningTime="2026-02-02 17:38:27.880514506 +0000 UTC m=+2899.502118596" watchObservedRunningTime="2026-02-02 17:38:27.892140256 +0000 UTC m=+2899.513744336" Feb 02 17:38:27 crc kubenswrapper[4835]: I0202 17:38:27.925754 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=7.925735509 podStartE2EDuration="7.925735509s" podCreationTimestamp="2026-02-02 17:38:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-02 17:38:27.904360022 +0000 UTC m=+2899.525964132" watchObservedRunningTime="2026-02-02 17:38:27.925735509 +0000 UTC m=+2899.547339579" Feb 02 17:38:29 crc kubenswrapper[4835]: I0202 17:38:29.892542 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerStarted","Data":"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808"} Feb 02 17:38:30 crc kubenswrapper[4835]: I0202 17:38:30.019012 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:30 crc kubenswrapper[4835]: I0202 17:38:30.927088 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerStarted","Data":"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6"} Feb 02 17:38:31 crc kubenswrapper[4835]: I0202 17:38:31.938525 4835 generic.go:334] "Generic (PLEG): container finished" podID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerID="b4a83f70f2990b82ef14ada3b958133f1bf6cc74c4248b3aaa4bf89501b57bef" exitCode=0 Feb 02 17:38:31 crc kubenswrapper[4835]: I0202 17:38:31.938596 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56ddff97fb-66qgb" event={"ID":"22b32279-2087-4a32-84ac-38c8b84d6a4d","Type":"ContainerDied","Data":"b4a83f70f2990b82ef14ada3b958133f1bf6cc74c4248b3aaa4bf89501b57bef"} Feb 02 17:38:32 crc kubenswrapper[4835]: I0202 17:38:32.695288 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-56ddff97fb-66qgb" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.248:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.248:8443: connect: connection refused" Feb 02 17:38:32 crc kubenswrapper[4835]: I0202 17:38:32.954688 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerStarted","Data":"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5"} Feb 02 17:38:32 crc kubenswrapper[4835]: I0202 17:38:32.954941 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:38:32 crc kubenswrapper[4835]: I0202 17:38:32.954964 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="ceilometer-central-agent" containerID="cri-o://a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82" gracePeriod=30 Feb 02 17:38:32 crc kubenswrapper[4835]: I0202 17:38:32.955012 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="proxy-httpd" containerID="cri-o://4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5" gracePeriod=30 Feb 02 17:38:32 crc kubenswrapper[4835]: I0202 17:38:32.955026 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="ceilometer-notification-agent" containerID="cri-o://37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808" gracePeriod=30 Feb 02 17:38:32 crc kubenswrapper[4835]: I0202 17:38:32.954995 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="sg-core" containerID="cri-o://ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6" gracePeriod=30 Feb 02 17:38:32 crc kubenswrapper[4835]: I0202 17:38:32.996675 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.977335981 podStartE2EDuration="8.996657458s" podCreationTimestamp="2026-02-02 17:38:24 +0000 UTC" firstStartedPulling="2026-02-02 17:38:26.399263809 +0000 UTC m=+2898.020867889" lastFinishedPulling="2026-02-02 17:38:32.418585246 +0000 UTC m=+2904.040189366" observedRunningTime="2026-02-02 17:38:32.994675891 +0000 UTC m=+2904.616280031" watchObservedRunningTime="2026-02-02 17:38:32.996657458 +0000 UTC m=+2904.618261528" Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.882508 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970544 4835 generic.go:334] "Generic (PLEG): container finished" podID="448f7464-6641-44af-91cf-4ecdda729acf" containerID="4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5" exitCode=0 Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970593 4835 generic.go:334] "Generic (PLEG): container finished" podID="448f7464-6641-44af-91cf-4ecdda729acf" containerID="ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6" exitCode=2 Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970611 4835 generic.go:334] "Generic (PLEG): container finished" podID="448f7464-6641-44af-91cf-4ecdda729acf" containerID="37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808" exitCode=0 Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970628 4835 generic.go:334] "Generic (PLEG): container finished" podID="448f7464-6641-44af-91cf-4ecdda729acf" containerID="a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82" exitCode=0 Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970658 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerDied","Data":"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5"} Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970695 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerDied","Data":"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6"} Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970769 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerDied","Data":"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808"} Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970794 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerDied","Data":"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82"} Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970813 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"448f7464-6641-44af-91cf-4ecdda729acf","Type":"ContainerDied","Data":"3666b6eb5afeb7d429c7f97021f393d69ac5d09e56939efd8e53cf706e02010b"} Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.970891 4835 scope.go:117] 
"RemoveContainer" containerID="4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5" Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.971255 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:38:33 crc kubenswrapper[4835]: I0202 17:38:33.995129 4835 scope.go:117] "RemoveContainer" containerID="ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.012588 4835 scope.go:117] "RemoveContainer" containerID="37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.031307 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-sg-core-conf-yaml\") pod \"448f7464-6641-44af-91cf-4ecdda729acf\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.031429 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-scripts\") pod \"448f7464-6641-44af-91cf-4ecdda729acf\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.031502 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-ceilometer-tls-certs\") pod \"448f7464-6641-44af-91cf-4ecdda729acf\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.031570 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-config-data\") pod \"448f7464-6641-44af-91cf-4ecdda729acf\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.031648 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-log-httpd\") pod \"448f7464-6641-44af-91cf-4ecdda729acf\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.031707 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-run-httpd\") pod \"448f7464-6641-44af-91cf-4ecdda729acf\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.031780 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr7kk\" (UniqueName: \"kubernetes.io/projected/448f7464-6641-44af-91cf-4ecdda729acf-kube-api-access-rr7kk\") pod \"448f7464-6641-44af-91cf-4ecdda729acf\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.031822 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-combined-ca-bundle\") pod \"448f7464-6641-44af-91cf-4ecdda729acf\" (UID: \"448f7464-6641-44af-91cf-4ecdda729acf\") " Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.032741 4835 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "448f7464-6641-44af-91cf-4ecdda729acf" (UID: "448f7464-6641-44af-91cf-4ecdda729acf"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.032800 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "448f7464-6641-44af-91cf-4ecdda729acf" (UID: "448f7464-6641-44af-91cf-4ecdda729acf"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.035538 4835 scope.go:117] "RemoveContainer" containerID="a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.055309 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-scripts" (OuterVolumeSpecName: "scripts") pod "448f7464-6641-44af-91cf-4ecdda729acf" (UID: "448f7464-6641-44af-91cf-4ecdda729acf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.055424 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/448f7464-6641-44af-91cf-4ecdda729acf-kube-api-access-rr7kk" (OuterVolumeSpecName: "kube-api-access-rr7kk") pod "448f7464-6641-44af-91cf-4ecdda729acf" (UID: "448f7464-6641-44af-91cf-4ecdda729acf"). InnerVolumeSpecName "kube-api-access-rr7kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.071371 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "448f7464-6641-44af-91cf-4ecdda729acf" (UID: "448f7464-6641-44af-91cf-4ecdda729acf"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.103698 4835 scope.go:117] "RemoveContainer" containerID="4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.104268 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": container with ID starting with 4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5 not found: ID does not exist" containerID="4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.104332 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5"} err="failed to get container status \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": rpc error: code = NotFound desc = could not find container \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": container with ID starting with 4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.104364 4835 scope.go:117] "RemoveContainer" containerID="ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.104716 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": container with ID starting with ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6 not found: ID does not exist" containerID="ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.104756 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6"} err="failed to get container status \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": rpc error: code = NotFound desc = could not find container \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": container with ID starting with ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.104778 4835 scope.go:117] "RemoveContainer" containerID="37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.105167 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": container with ID starting with 37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808 not found: ID does not exist" containerID="37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.105235 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808"} err="failed to get container status \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": rpc error: code = NotFound desc = could not 
find container \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": container with ID starting with 37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.105257 4835 scope.go:117] "RemoveContainer" containerID="a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.105687 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": container with ID starting with a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82 not found: ID does not exist" containerID="a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.105748 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82"} err="failed to get container status \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": rpc error: code = NotFound desc = could not find container \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": container with ID starting with a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.105784 4835 scope.go:117] "RemoveContainer" containerID="4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.106114 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5"} err="failed to get container status \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": rpc error: code = NotFound desc = could not find container \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": container with ID starting with 4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.106145 4835 scope.go:117] "RemoveContainer" containerID="ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.106470 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6"} err="failed to get container status \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": rpc error: code = NotFound desc = could not find container \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": container with ID starting with ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.106499 4835 scope.go:117] "RemoveContainer" containerID="37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.106838 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808"} err="failed to get container status \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": rpc error: code = NotFound desc = could not 
find container \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": container with ID starting with 37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.106893 4835 scope.go:117] "RemoveContainer" containerID="a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.107236 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82"} err="failed to get container status \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": rpc error: code = NotFound desc = could not find container \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": container with ID starting with a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.107264 4835 scope.go:117] "RemoveContainer" containerID="4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.107546 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5"} err="failed to get container status \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": rpc error: code = NotFound desc = could not find container \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": container with ID starting with 4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.107592 4835 scope.go:117] "RemoveContainer" containerID="ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.107924 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6"} err="failed to get container status \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": rpc error: code = NotFound desc = could not find container \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": container with ID starting with ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.107950 4835 scope.go:117] "RemoveContainer" containerID="37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.108239 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808"} err="failed to get container status \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": rpc error: code = NotFound desc = could not find container \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": container with ID starting with 37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.108297 4835 scope.go:117] "RemoveContainer" containerID="a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.108542 4835 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82"} err="failed to get container status \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": rpc error: code = NotFound desc = could not find container \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": container with ID starting with a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.108571 4835 scope.go:117] "RemoveContainer" containerID="4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.108887 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5"} err="failed to get container status \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": rpc error: code = NotFound desc = could not find container \"4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5\": container with ID starting with 4dc4a68ab24648777291e644836fb5819bf1493b8d002b065a7fdb4351d11ff5 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.108961 4835 scope.go:117] "RemoveContainer" containerID="ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.109291 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6"} err="failed to get container status \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": rpc error: code = NotFound desc = could not find container \"ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6\": container with ID starting with ff716fb0dca82d19fa5e5e3d8468cf50de27a082e947d726476f0ca6d07f22c6 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.109319 4835 scope.go:117] "RemoveContainer" containerID="37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.109955 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808"} err="failed to get container status \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": rpc error: code = NotFound desc = could not find container \"37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808\": container with ID starting with 37abeee6f6fac83ff1b5c1259ccf322e3eaba5ed34d9a97c72a6ea2ba67c4808 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.110032 4835 scope.go:117] "RemoveContainer" containerID="a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.110344 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82"} err="failed to get container status \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": rpc error: code = NotFound desc = could not find container \"a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82\": container with ID starting with 
a7b616cd94e0f19627632a7242f4ea1eae51514e1712edf0d6ca2ff701543e82 not found: ID does not exist" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.110530 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "448f7464-6641-44af-91cf-4ecdda729acf" (UID: "448f7464-6641-44af-91cf-4ecdda729acf"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.123574 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "448f7464-6641-44af-91cf-4ecdda729acf" (UID: "448f7464-6641-44af-91cf-4ecdda729acf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.134138 4835 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.134162 4835 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/448f7464-6641-44af-91cf-4ecdda729acf-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.134172 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr7kk\" (UniqueName: \"kubernetes.io/projected/448f7464-6641-44af-91cf-4ecdda729acf-kube-api-access-rr7kk\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.134182 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.134190 4835 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.134198 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.134207 4835 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.153623 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-config-data" (OuterVolumeSpecName: "config-data") pod "448f7464-6641-44af-91cf-4ecdda729acf" (UID: "448f7464-6641-44af-91cf-4ecdda729acf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.236474 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/448f7464-6641-44af-91cf-4ecdda729acf-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.313843 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.344433 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359181 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359740 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="sg-core" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359763 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="sg-core" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359784 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerName="horizon" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359792 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerName="horizon" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359813 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerName="horizon-log" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359821 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerName="horizon-log" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359834 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38d5997d-17a2-4379-bf47-ff2ef2705e77" containerName="init" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359842 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="38d5997d-17a2-4379-bf47-ff2ef2705e77" containerName="init" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359861 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38d5997d-17a2-4379-bf47-ff2ef2705e77" containerName="dnsmasq-dns" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359870 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="38d5997d-17a2-4379-bf47-ff2ef2705e77" containerName="dnsmasq-dns" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359888 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86972842-42a5-46ce-b163-62b8f57571d3" containerName="horizon" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359896 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="86972842-42a5-46ce-b163-62b8f57571d3" containerName="horizon" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359918 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="ceilometer-notification-agent" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359926 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="ceilometer-notification-agent" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359940 4835 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="proxy-httpd" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359949 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="proxy-httpd" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359964 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="ceilometer-central-agent" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359972 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="ceilometer-central-agent" Feb 02 17:38:34 crc kubenswrapper[4835]: E0202 17:38:34.359990 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86972842-42a5-46ce-b163-62b8f57571d3" containerName="horizon-log" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.359998 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="86972842-42a5-46ce-b163-62b8f57571d3" containerName="horizon-log" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360212 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="sg-core" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360229 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="86972842-42a5-46ce-b163-62b8f57571d3" containerName="horizon" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360241 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="86972842-42a5-46ce-b163-62b8f57571d3" containerName="horizon-log" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360253 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="proxy-httpd" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360268 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="ceilometer-central-agent" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360309 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerName="horizon-log" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360324 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b89e0428-ff08-413f-aad7-6686319cf0fd" containerName="horizon" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360343 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="38d5997d-17a2-4379-bf47-ff2ef2705e77" containerName="dnsmasq-dns" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.360359 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="448f7464-6641-44af-91cf-4ecdda729acf" containerName="ceilometer-notification-agent" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.363160 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.367430 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.373827 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.378902 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.384154 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.542963 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.543043 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05826cd2-708f-4ce4-bbfb-04a0e6206c12-log-httpd\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.543174 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4c9w\" (UniqueName: \"kubernetes.io/projected/05826cd2-708f-4ce4-bbfb-04a0e6206c12-kube-api-access-n4c9w\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.543211 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-config-data\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.543227 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.543258 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.543297 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-scripts\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.543320 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/05826cd2-708f-4ce4-bbfb-04a0e6206c12-run-httpd\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.644791 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4c9w\" (UniqueName: \"kubernetes.io/projected/05826cd2-708f-4ce4-bbfb-04a0e6206c12-kube-api-access-n4c9w\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.646100 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-config-data\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.647030 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.647174 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.647387 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-scripts\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.647515 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05826cd2-708f-4ce4-bbfb-04a0e6206c12-run-httpd\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.647730 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.647834 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05826cd2-708f-4ce4-bbfb-04a0e6206c12-log-httpd\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.647872 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05826cd2-708f-4ce4-bbfb-04a0e6206c12-run-httpd\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.648400 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/05826cd2-708f-4ce4-bbfb-04a0e6206c12-log-httpd\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.650493 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.651157 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-scripts\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.651351 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.651697 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-config-data\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.660103 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05826cd2-708f-4ce4-bbfb-04a0e6206c12-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.660992 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4c9w\" (UniqueName: \"kubernetes.io/projected/05826cd2-708f-4ce4-bbfb-04a0e6206c12-kube-api-access-n4c9w\") pod \"ceilometer-0\" (UID: \"05826cd2-708f-4ce4-bbfb-04a0e6206c12\") " pod="openstack/ceilometer-0" Feb 02 17:38:34 crc kubenswrapper[4835]: I0202 17:38:34.688820 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 17:38:35 crc kubenswrapper[4835]: I0202 17:38:35.163709 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 17:38:35 crc kubenswrapper[4835]: I0202 17:38:35.208568 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="448f7464-6641-44af-91cf-4ecdda729acf" path="/var/lib/kubelet/pods/448f7464-6641-44af-91cf-4ecdda729acf/volumes" Feb 02 17:38:35 crc kubenswrapper[4835]: I0202 17:38:35.289550 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Feb 02 17:38:35 crc kubenswrapper[4835]: I0202 17:38:35.995002 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05826cd2-708f-4ce4-bbfb-04a0e6206c12","Type":"ContainerStarted","Data":"2a975cac7879f7d009864f068c19cda44d21aab3834f1c7843616e964af6920b"} Feb 02 17:38:36 crc kubenswrapper[4835]: I0202 17:38:36.815761 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Feb 02 17:38:36 crc kubenswrapper[4835]: I0202 17:38:36.904650 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:37 crc kubenswrapper[4835]: I0202 17:38:37.005922 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05826cd2-708f-4ce4-bbfb-04a0e6206c12","Type":"ContainerStarted","Data":"eacb16e3f2d67889ee003de48e04f8f4b493ac60ecfd23496a1b7e203b38e48a"} Feb 02 17:38:37 crc kubenswrapper[4835]: I0202 17:38:37.006140 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05826cd2-708f-4ce4-bbfb-04a0e6206c12","Type":"ContainerStarted","Data":"d82a337f056ffa5848c8b94d4720465eb67af167104466190bbffafb8fb3c531"} Feb 02 17:38:37 crc kubenswrapper[4835]: I0202 17:38:37.006046 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerName="manila-scheduler" containerID="cri-o://4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d" gracePeriod=30 Feb 02 17:38:37 crc kubenswrapper[4835]: I0202 17:38:37.006382 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerName="probe" containerID="cri-o://86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1" gracePeriod=30 Feb 02 17:38:38 crc kubenswrapper[4835]: I0202 17:38:38.028760 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05826cd2-708f-4ce4-bbfb-04a0e6206c12","Type":"ContainerStarted","Data":"53f372603ac96d723fcf450c42e8547b524cf521a38e592d3d1fc8e55e8740e6"} Feb 02 17:38:38 crc kubenswrapper[4835]: I0202 17:38:38.031888 4835 generic.go:334] "Generic (PLEG): container finished" podID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerID="86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1" exitCode=0 Feb 02 17:38:38 crc kubenswrapper[4835]: I0202 17:38:38.031937 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"0e892d57-f496-42fa-94fd-d8d458fa61cc","Type":"ContainerDied","Data":"86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1"} Feb 02 17:38:41 crc kubenswrapper[4835]: I0202 17:38:41.067841 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"05826cd2-708f-4ce4-bbfb-04a0e6206c12","Type":"ContainerStarted","Data":"89d20e7a077a7032aef56a16b7468549bcdf429785d69be844573771e61fd387"} Feb 02 17:38:41 crc kubenswrapper[4835]: I0202 17:38:41.068139 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 17:38:41 crc kubenswrapper[4835]: I0202 17:38:41.110049 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.395242466 podStartE2EDuration="7.110031s" podCreationTimestamp="2026-02-02 17:38:34 +0000 UTC" firstStartedPulling="2026-02-02 17:38:35.176464875 +0000 UTC m=+2906.798068955" lastFinishedPulling="2026-02-02 17:38:39.891253409 +0000 UTC m=+2911.512857489" observedRunningTime="2026-02-02 17:38:41.094764747 +0000 UTC m=+2912.716368857" watchObservedRunningTime="2026-02-02 17:38:41.110031 +0000 UTC m=+2912.731635080" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.442965 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.562586 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.621505 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89gdr\" (UniqueName: \"kubernetes.io/projected/0e892d57-f496-42fa-94fd-d8d458fa61cc-kube-api-access-89gdr\") pod \"0e892d57-f496-42fa-94fd-d8d458fa61cc\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.621611 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-combined-ca-bundle\") pod \"0e892d57-f496-42fa-94fd-d8d458fa61cc\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.621703 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-scripts\") pod \"0e892d57-f496-42fa-94fd-d8d458fa61cc\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.621775 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data-custom\") pod \"0e892d57-f496-42fa-94fd-d8d458fa61cc\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.621815 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data\") pod \"0e892d57-f496-42fa-94fd-d8d458fa61cc\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.621893 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e892d57-f496-42fa-94fd-d8d458fa61cc-etc-machine-id\") pod \"0e892d57-f496-42fa-94fd-d8d458fa61cc\" (UID: \"0e892d57-f496-42fa-94fd-d8d458fa61cc\") " Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.631899 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/0e892d57-f496-42fa-94fd-d8d458fa61cc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0e892d57-f496-42fa-94fd-d8d458fa61cc" (UID: "0e892d57-f496-42fa-94fd-d8d458fa61cc"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.646614 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e892d57-f496-42fa-94fd-d8d458fa61cc-kube-api-access-89gdr" (OuterVolumeSpecName: "kube-api-access-89gdr") pod "0e892d57-f496-42fa-94fd-d8d458fa61cc" (UID: "0e892d57-f496-42fa-94fd-d8d458fa61cc"). InnerVolumeSpecName "kube-api-access-89gdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.663944 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0e892d57-f496-42fa-94fd-d8d458fa61cc" (UID: "0e892d57-f496-42fa-94fd-d8d458fa61cc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.666672 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-scripts" (OuterVolumeSpecName: "scripts") pod "0e892d57-f496-42fa-94fd-d8d458fa61cc" (UID: "0e892d57-f496-42fa-94fd-d8d458fa61cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.695938 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-56ddff97fb-66qgb" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.248:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.248:8443: connect: connection refused" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.729873 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.729913 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.729926 4835 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0e892d57-f496-42fa-94fd-d8d458fa61cc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.729937 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89gdr\" (UniqueName: \"kubernetes.io/projected/0e892d57-f496-42fa-94fd-d8d458fa61cc-kube-api-access-89gdr\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.745113 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e892d57-f496-42fa-94fd-d8d458fa61cc" (UID: "0e892d57-f496-42fa-94fd-d8d458fa61cc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.761061 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data" (OuterVolumeSpecName: "config-data") pod "0e892d57-f496-42fa-94fd-d8d458fa61cc" (UID: "0e892d57-f496-42fa-94fd-d8d458fa61cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.831255 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:42 crc kubenswrapper[4835]: I0202 17:38:42.831331 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e892d57-f496-42fa-94fd-d8d458fa61cc-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.089440 4835 generic.go:334] "Generic (PLEG): container finished" podID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerID="4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d" exitCode=0 Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.089501 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"0e892d57-f496-42fa-94fd-d8d458fa61cc","Type":"ContainerDied","Data":"4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d"} Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.089541 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"0e892d57-f496-42fa-94fd-d8d458fa61cc","Type":"ContainerDied","Data":"50d69ba1cf4d74fddd5af532f49f1d0dc759c0a12d78b9421ba2eecddca62490"} Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.089568 4835 scope.go:117] "RemoveContainer" containerID="86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.089595 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.123398 4835 scope.go:117] "RemoveContainer" containerID="4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.146807 4835 scope.go:117] "RemoveContainer" containerID="86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1" Feb 02 17:38:43 crc kubenswrapper[4835]: E0202 17:38:43.147890 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1\": container with ID starting with 86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1 not found: ID does not exist" containerID="86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.147926 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1"} err="failed to get container status \"86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1\": rpc error: code = NotFound desc = could not find container \"86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1\": container with ID starting with 86e72bd16c9ead1d9f095fd4217ff5387249541af48ec4ed604200e9733e8fa1 not found: ID does not exist" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.147964 4835 scope.go:117] "RemoveContainer" containerID="4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d" Feb 02 17:38:43 crc kubenswrapper[4835]: E0202 17:38:43.148584 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d\": container with ID starting with 4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d not found: ID does not exist" containerID="4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.148612 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d"} err="failed to get container status \"4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d\": rpc error: code = NotFound desc = could not find container \"4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d\": container with ID starting with 4f6f0841b7692a80c8fa17e29fa411be93f2d347ab9c1792f948e84040d8623d not found: ID does not exist" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.151357 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.160850 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.171527 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:43 crc kubenswrapper[4835]: E0202 17:38:43.172035 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerName="manila-scheduler" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.172057 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" 
containerName="manila-scheduler" Feb 02 17:38:43 crc kubenswrapper[4835]: E0202 17:38:43.172088 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerName="probe" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.172099 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerName="probe" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.172328 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerName="manila-scheduler" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.172353 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" containerName="probe" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.173582 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.175694 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.201036 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e892d57-f496-42fa-94fd-d8d458fa61cc" path="/var/lib/kubelet/pods/0e892d57-f496-42fa-94fd-d8d458fa61cc/volumes" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.201703 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.343599 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.343969 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-scripts\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.344141 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f49a042-6b94-4a36-8607-1eb164147d96-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.344175 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-config-data\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.344215 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzwmw\" (UniqueName: \"kubernetes.io/projected/4f49a042-6b94-4a36-8607-1eb164147d96-kube-api-access-bzwmw\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.344403 
4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.446644 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.446729 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.446754 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-scripts\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.446836 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f49a042-6b94-4a36-8607-1eb164147d96-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.446853 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-config-data\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.446876 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzwmw\" (UniqueName: \"kubernetes.io/projected/4f49a042-6b94-4a36-8607-1eb164147d96-kube-api-access-bzwmw\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.447792 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f49a042-6b94-4a36-8607-1eb164147d96-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.452914 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-scripts\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.452975 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: 
\"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.453024 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.466454 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f49a042-6b94-4a36-8607-1eb164147d96-config-data\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.469113 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzwmw\" (UniqueName: \"kubernetes.io/projected/4f49a042-6b94-4a36-8607-1eb164147d96-kube-api-access-bzwmw\") pod \"manila-scheduler-0\" (UID: \"4f49a042-6b94-4a36-8607-1eb164147d96\") " pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.521984 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Feb 02 17:38:43 crc kubenswrapper[4835]: I0202 17:38:43.989344 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Feb 02 17:38:43 crc kubenswrapper[4835]: W0202 17:38:43.992390 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f49a042_6b94_4a36_8607_1eb164147d96.slice/crio-b2eed6c738233b2bc1d0af7f77b4920a730649cb84bab9367c7d5d9bbda7811b WatchSource:0}: Error finding container b2eed6c738233b2bc1d0af7f77b4920a730649cb84bab9367c7d5d9bbda7811b: Status 404 returned error can't find the container with id b2eed6c738233b2bc1d0af7f77b4920a730649cb84bab9367c7d5d9bbda7811b Feb 02 17:38:44 crc kubenswrapper[4835]: I0202 17:38:44.098494 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"4f49a042-6b94-4a36-8607-1eb164147d96","Type":"ContainerStarted","Data":"b2eed6c738233b2bc1d0af7f77b4920a730649cb84bab9367c7d5d9bbda7811b"} Feb 02 17:38:44 crc kubenswrapper[4835]: I0202 17:38:44.870019 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:38:44 crc kubenswrapper[4835]: I0202 17:38:44.870713 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:38:44 crc kubenswrapper[4835]: I0202 17:38:44.870766 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:38:44 crc kubenswrapper[4835]: I0202 17:38:44.871654 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"8c0c1dd28e739785aa59f1b10ef0e393360cdd138b956085fece6a9d4036c389"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:38:44 crc kubenswrapper[4835]: I0202 17:38:44.871713 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://8c0c1dd28e739785aa59f1b10ef0e393360cdd138b956085fece6a9d4036c389" gracePeriod=600 Feb 02 17:38:45 crc kubenswrapper[4835]: I0202 17:38:45.119532 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"4f49a042-6b94-4a36-8607-1eb164147d96","Type":"ContainerStarted","Data":"a641550482c285da75ec6a88500fa7e54ea5dc10ad64e2adf7a64cf3918ec337"} Feb 02 17:38:45 crc kubenswrapper[4835]: I0202 17:38:45.119572 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"4f49a042-6b94-4a36-8607-1eb164147d96","Type":"ContainerStarted","Data":"f50cd7f081524e47a33d0c387fb4eb1c6353e76937963e54c14df1fffd52e187"} Feb 02 17:38:45 crc kubenswrapper[4835]: I0202 17:38:45.124631 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="8c0c1dd28e739785aa59f1b10ef0e393360cdd138b956085fece6a9d4036c389" exitCode=0 Feb 02 17:38:45 crc kubenswrapper[4835]: I0202 17:38:45.124663 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"8c0c1dd28e739785aa59f1b10ef0e393360cdd138b956085fece6a9d4036c389"} Feb 02 17:38:45 crc kubenswrapper[4835]: I0202 17:38:45.124686 4835 scope.go:117] "RemoveContainer" containerID="015f7f9f19018b77d8f5cb25df708aaab1fe08e8375645cbd2422cbbfb63c63b" Feb 02 17:38:45 crc kubenswrapper[4835]: I0202 17:38:45.141986 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.141962199 podStartE2EDuration="2.141962199s" podCreationTimestamp="2026-02-02 17:38:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:38:45.137711679 +0000 UTC m=+2916.759315759" watchObservedRunningTime="2026-02-02 17:38:45.141962199 +0000 UTC m=+2916.763566309" Feb 02 17:38:46 crc kubenswrapper[4835]: I0202 17:38:46.139957 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b"} Feb 02 17:38:46 crc kubenswrapper[4835]: I0202 17:38:46.787158 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Feb 02 17:38:46 crc kubenswrapper[4835]: I0202 17:38:46.854091 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:47 crc kubenswrapper[4835]: I0202 17:38:47.147878 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" containerName="manila-share" 
containerID="cri-o://0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2" gracePeriod=30 Feb 02 17:38:47 crc kubenswrapper[4835]: I0202 17:38:47.147988 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" containerName="probe" containerID="cri-o://58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98" gracePeriod=30 Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.057626 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142301 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-var-lib-manila\") pod \"3118cb6d-c193-41a4-8edd-444205a0020e\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142430 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwxdg\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-kube-api-access-fwxdg\") pod \"3118cb6d-c193-41a4-8edd-444205a0020e\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142480 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data\") pod \"3118cb6d-c193-41a4-8edd-444205a0020e\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142534 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-etc-machine-id\") pod \"3118cb6d-c193-41a4-8edd-444205a0020e\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142559 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-ceph\") pod \"3118cb6d-c193-41a4-8edd-444205a0020e\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142593 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-scripts\") pod \"3118cb6d-c193-41a4-8edd-444205a0020e\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142629 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data-custom\") pod \"3118cb6d-c193-41a4-8edd-444205a0020e\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142648 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-combined-ca-bundle\") pod \"3118cb6d-c193-41a4-8edd-444205a0020e\" (UID: \"3118cb6d-c193-41a4-8edd-444205a0020e\") " Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142837 4835 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3118cb6d-c193-41a4-8edd-444205a0020e" (UID: "3118cb6d-c193-41a4-8edd-444205a0020e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.142874 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "3118cb6d-c193-41a4-8edd-444205a0020e" (UID: "3118cb6d-c193-41a4-8edd-444205a0020e"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.144209 4835 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-var-lib-manila\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.144241 4835 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3118cb6d-c193-41a4-8edd-444205a0020e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.150694 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3118cb6d-c193-41a4-8edd-444205a0020e" (UID: "3118cb6d-c193-41a4-8edd-444205a0020e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.150699 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-kube-api-access-fwxdg" (OuterVolumeSpecName: "kube-api-access-fwxdg") pod "3118cb6d-c193-41a4-8edd-444205a0020e" (UID: "3118cb6d-c193-41a4-8edd-444205a0020e"). InnerVolumeSpecName "kube-api-access-fwxdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.150816 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-ceph" (OuterVolumeSpecName: "ceph") pod "3118cb6d-c193-41a4-8edd-444205a0020e" (UID: "3118cb6d-c193-41a4-8edd-444205a0020e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.154910 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-scripts" (OuterVolumeSpecName: "scripts") pod "3118cb6d-c193-41a4-8edd-444205a0020e" (UID: "3118cb6d-c193-41a4-8edd-444205a0020e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.173573 4835 generic.go:334] "Generic (PLEG): container finished" podID="3118cb6d-c193-41a4-8edd-444205a0020e" containerID="58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98" exitCode=0 Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.173611 4835 generic.go:334] "Generic (PLEG): container finished" podID="3118cb6d-c193-41a4-8edd-444205a0020e" containerID="0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2" exitCode=1 Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.173623 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.173637 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3118cb6d-c193-41a4-8edd-444205a0020e","Type":"ContainerDied","Data":"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98"} Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.173677 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3118cb6d-c193-41a4-8edd-444205a0020e","Type":"ContainerDied","Data":"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2"} Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.173692 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3118cb6d-c193-41a4-8edd-444205a0020e","Type":"ContainerDied","Data":"a8057768d028c90abab5a16661d0ca9a034a1d26e643704901bb78f207e82854"} Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.173710 4835 scope.go:117] "RemoveContainer" containerID="58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.220585 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3118cb6d-c193-41a4-8edd-444205a0020e" (UID: "3118cb6d-c193-41a4-8edd-444205a0020e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.243857 4835 scope.go:117] "RemoveContainer" containerID="0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.245644 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwxdg\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-kube-api-access-fwxdg\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.245684 4835 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3118cb6d-c193-41a4-8edd-444205a0020e-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.245698 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.245711 4835 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.245723 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.276121 4835 scope.go:117] "RemoveContainer" containerID="58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98" Feb 02 17:38:48 crc kubenswrapper[4835]: E0202 17:38:48.276623 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98\": container with ID starting with 58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98 not found: ID does not exist" containerID="58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.276662 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98"} err="failed to get container status \"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98\": rpc error: code = NotFound desc = could not find container \"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98\": container with ID starting with 58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98 not found: ID does not exist" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.276690 4835 scope.go:117] "RemoveContainer" containerID="0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2" Feb 02 17:38:48 crc kubenswrapper[4835]: E0202 17:38:48.276986 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2\": container with ID starting with 0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2 not found: ID does not exist" containerID="0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.277012 4835 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2"} err="failed to get container status \"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2\": rpc error: code = NotFound desc = could not find container \"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2\": container with ID starting with 0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2 not found: ID does not exist" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.277031 4835 scope.go:117] "RemoveContainer" containerID="58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.277498 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98"} err="failed to get container status \"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98\": rpc error: code = NotFound desc = could not find container \"58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98\": container with ID starting with 58e19ede7f4f4be8e6aa6b818560e40e0125ca184f6bff84d383082c3e364b98 not found: ID does not exist" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.277550 4835 scope.go:117] "RemoveContainer" containerID="0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.278047 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2"} err="failed to get container status \"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2\": rpc error: code = NotFound desc = could not find container \"0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2\": container with ID starting with 0cf6791c1a99c04d747f9bae041f80e9b311a1920b5a60ed15143e5ab800e7d2 not found: ID does not exist" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.280685 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data" (OuterVolumeSpecName: "config-data") pod "3118cb6d-c193-41a4-8edd-444205a0020e" (UID: "3118cb6d-c193-41a4-8edd-444205a0020e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.348125 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3118cb6d-c193-41a4-8edd-444205a0020e-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.561976 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.569028 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.592205 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:48 crc kubenswrapper[4835]: E0202 17:38:48.593029 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" containerName="probe" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.593053 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" containerName="probe" Feb 02 17:38:48 crc kubenswrapper[4835]: E0202 17:38:48.593074 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" containerName="manila-share" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.593083 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" containerName="manila-share" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.593323 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" containerName="manila-share" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.593344 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" containerName="probe" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.594344 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.596929 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.603179 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.653762 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eb3ede9c-1564-450a-b0c5-034c5ff8d285-ceph\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.653816 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-scripts\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.653865 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eb3ede9c-1564-450a-b0c5-034c5ff8d285-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.653924 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/eb3ede9c-1564-450a-b0c5-034c5ff8d285-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.653941 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.654126 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.654237 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-config-data\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.654306 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdqmj\" (UniqueName: \"kubernetes.io/projected/eb3ede9c-1564-450a-b0c5-034c5ff8d285-kube-api-access-wdqmj\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc 
kubenswrapper[4835]: I0202 17:38:48.755874 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-config-data\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.755937 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdqmj\" (UniqueName: \"kubernetes.io/projected/eb3ede9c-1564-450a-b0c5-034c5ff8d285-kube-api-access-wdqmj\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.756043 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eb3ede9c-1564-450a-b0c5-034c5ff8d285-ceph\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.756093 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-scripts\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.756162 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eb3ede9c-1564-450a-b0c5-034c5ff8d285-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.756207 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/eb3ede9c-1564-450a-b0c5-034c5ff8d285-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.756228 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.756326 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.756472 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eb3ede9c-1564-450a-b0c5-034c5ff8d285-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.756550 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: 
\"kubernetes.io/host-path/eb3ede9c-1564-450a-b0c5-034c5ff8d285-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.759608 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-scripts\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.759839 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eb3ede9c-1564-450a-b0c5-034c5ff8d285-ceph\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.760329 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-config-data\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.760767 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.761101 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb3ede9c-1564-450a-b0c5-034c5ff8d285-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.780032 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdqmj\" (UniqueName: \"kubernetes.io/projected/eb3ede9c-1564-450a-b0c5-034c5ff8d285-kube-api-access-wdqmj\") pod \"manila-share-share1-0\" (UID: \"eb3ede9c-1564-450a-b0c5-034c5ff8d285\") " pod="openstack/manila-share-share1-0" Feb 02 17:38:48 crc kubenswrapper[4835]: I0202 17:38:48.910227 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Feb 02 17:38:49 crc kubenswrapper[4835]: I0202 17:38:49.229248 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3118cb6d-c193-41a4-8edd-444205a0020e" path="/var/lib/kubelet/pods/3118cb6d-c193-41a4-8edd-444205a0020e/volumes" Feb 02 17:38:49 crc kubenswrapper[4835]: I0202 17:38:49.471882 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Feb 02 17:38:49 crc kubenswrapper[4835]: W0202 17:38:49.472427 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb3ede9c_1564_450a_b0c5_034c5ff8d285.slice/crio-d84371c7d4b1840f09ffe911dfc7107234af989dda1462f5537cae4ff7688ecc WatchSource:0}: Error finding container d84371c7d4b1840f09ffe911dfc7107234af989dda1462f5537cae4ff7688ecc: Status 404 returned error can't find the container with id d84371c7d4b1840f09ffe911dfc7107234af989dda1462f5537cae4ff7688ecc Feb 02 17:38:50 crc kubenswrapper[4835]: I0202 17:38:50.207655 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"eb3ede9c-1564-450a-b0c5-034c5ff8d285","Type":"ContainerStarted","Data":"88ff745d6541ddddd47d84f275b288438ecb74fa6c16752cd208b38c5e282875"} Feb 02 17:38:50 crc kubenswrapper[4835]: I0202 17:38:50.208064 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"eb3ede9c-1564-450a-b0c5-034c5ff8d285","Type":"ContainerStarted","Data":"d84371c7d4b1840f09ffe911dfc7107234af989dda1462f5537cae4ff7688ecc"} Feb 02 17:38:51 crc kubenswrapper[4835]: I0202 17:38:51.218625 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"eb3ede9c-1564-450a-b0c5-034c5ff8d285","Type":"ContainerStarted","Data":"47cdd1e6f6f73041b91e9cb21f39482c31faf495eebea5ec51d9f6b4fc829790"} Feb 02 17:38:51 crc kubenswrapper[4835]: I0202 17:38:51.255613 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.255592102 podStartE2EDuration="3.255592102s" podCreationTimestamp="2026-02-02 17:38:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 17:38:51.25094163 +0000 UTC m=+2922.872545720" watchObservedRunningTime="2026-02-02 17:38:51.255592102 +0000 UTC m=+2922.877196182" Feb 02 17:38:52 crc kubenswrapper[4835]: I0202 17:38:52.696339 4835 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-56ddff97fb-66qgb" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.248:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.248:8443: connect: connection refused" Feb 02 17:38:52 crc kubenswrapper[4835]: I0202 17:38:52.697128 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:38:53 crc kubenswrapper[4835]: I0202 17:38:53.522209 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.297443 4835 generic.go:334] "Generic (PLEG): container finished" podID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerID="586adfcb28f1300cc5d1b9ca3414d9043ae073502e1c357412494828fd854505" exitCode=137 Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 
17:38:58.297521 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56ddff97fb-66qgb" event={"ID":"22b32279-2087-4a32-84ac-38c8b84d6a4d","Type":"ContainerDied","Data":"586adfcb28f1300cc5d1b9ca3414d9043ae073502e1c357412494828fd854505"} Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.297913 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56ddff97fb-66qgb" event={"ID":"22b32279-2087-4a32-84ac-38c8b84d6a4d","Type":"ContainerDied","Data":"bc6a4c1441995e1acab1285b4f85666e78c0342aa8e3adbb5dec578edad0c90e"} Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.297930 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc6a4c1441995e1acab1285b4f85666e78c0342aa8e3adbb5dec578edad0c90e" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.328013 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.447486 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-secret-key\") pod \"22b32279-2087-4a32-84ac-38c8b84d6a4d\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.447600 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-config-data\") pod \"22b32279-2087-4a32-84ac-38c8b84d6a4d\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.447813 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-scripts\") pod \"22b32279-2087-4a32-84ac-38c8b84d6a4d\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.447883 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22b32279-2087-4a32-84ac-38c8b84d6a4d-logs\") pod \"22b32279-2087-4a32-84ac-38c8b84d6a4d\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.447926 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-combined-ca-bundle\") pod \"22b32279-2087-4a32-84ac-38c8b84d6a4d\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.447981 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzqpq\" (UniqueName: \"kubernetes.io/projected/22b32279-2087-4a32-84ac-38c8b84d6a4d-kube-api-access-gzqpq\") pod \"22b32279-2087-4a32-84ac-38c8b84d6a4d\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.448057 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-tls-certs\") pod \"22b32279-2087-4a32-84ac-38c8b84d6a4d\" (UID: \"22b32279-2087-4a32-84ac-38c8b84d6a4d\") " Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.449860 4835 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22b32279-2087-4a32-84ac-38c8b84d6a4d-logs" (OuterVolumeSpecName: "logs") pod "22b32279-2087-4a32-84ac-38c8b84d6a4d" (UID: "22b32279-2087-4a32-84ac-38c8b84d6a4d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.455971 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22b32279-2087-4a32-84ac-38c8b84d6a4d-kube-api-access-gzqpq" (OuterVolumeSpecName: "kube-api-access-gzqpq") pod "22b32279-2087-4a32-84ac-38c8b84d6a4d" (UID: "22b32279-2087-4a32-84ac-38c8b84d6a4d"). InnerVolumeSpecName "kube-api-access-gzqpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.458467 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "22b32279-2087-4a32-84ac-38c8b84d6a4d" (UID: "22b32279-2087-4a32-84ac-38c8b84d6a4d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.477143 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-scripts" (OuterVolumeSpecName: "scripts") pod "22b32279-2087-4a32-84ac-38c8b84d6a4d" (UID: "22b32279-2087-4a32-84ac-38c8b84d6a4d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.496960 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22b32279-2087-4a32-84ac-38c8b84d6a4d" (UID: "22b32279-2087-4a32-84ac-38c8b84d6a4d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.503551 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-config-data" (OuterVolumeSpecName: "config-data") pod "22b32279-2087-4a32-84ac-38c8b84d6a4d" (UID: "22b32279-2087-4a32-84ac-38c8b84d6a4d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.515077 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "22b32279-2087-4a32-84ac-38c8b84d6a4d" (UID: "22b32279-2087-4a32-84ac-38c8b84d6a4d"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.550664 4835 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22b32279-2087-4a32-84ac-38c8b84d6a4d-logs\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.550713 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.550734 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzqpq\" (UniqueName: \"kubernetes.io/projected/22b32279-2087-4a32-84ac-38c8b84d6a4d-kube-api-access-gzqpq\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.550753 4835 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.550770 4835 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/22b32279-2087-4a32-84ac-38c8b84d6a4d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.550786 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.550802 4835 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/22b32279-2087-4a32-84ac-38c8b84d6a4d-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 17:38:58 crc kubenswrapper[4835]: I0202 17:38:58.911014 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Feb 02 17:38:59 crc kubenswrapper[4835]: I0202 17:38:59.310783 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-56ddff97fb-66qgb" Feb 02 17:38:59 crc kubenswrapper[4835]: I0202 17:38:59.353001 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56ddff97fb-66qgb"] Feb 02 17:38:59 crc kubenswrapper[4835]: I0202 17:38:59.362952 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-56ddff97fb-66qgb"] Feb 02 17:39:01 crc kubenswrapper[4835]: I0202 17:39:01.208973 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" path="/var/lib/kubelet/pods/22b32279-2087-4a32-84ac-38c8b84d6a4d/volumes" Feb 02 17:39:04 crc kubenswrapper[4835]: I0202 17:39:04.717767 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 17:39:05 crc kubenswrapper[4835]: I0202 17:39:05.036204 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Feb 02 17:39:10 crc kubenswrapper[4835]: I0202 17:39:10.489944 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.536913 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Feb 02 17:39:56 crc kubenswrapper[4835]: E0202 17:39:56.537916 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon-log" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.537934 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon-log" Feb 02 17:39:56 crc kubenswrapper[4835]: E0202 17:39:56.537971 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.537978 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.538181 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.538201 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="22b32279-2087-4a32-84ac-38c8b84d6a4d" containerName="horizon-log" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.538974 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.541182 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-m7gcn" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.542377 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.542715 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.542809 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.551090 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.649771 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.649820 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.649947 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-config-data\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.649995 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.650045 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.650081 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xssh5\" (UniqueName: \"kubernetes.io/projected/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-kube-api-access-xssh5\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.650118 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ca-certs\") pod 
\"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.650155 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.650228 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.751794 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.751844 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.751868 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-config-data\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.751891 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.751924 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.751945 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xssh5\" (UniqueName: \"kubernetes.io/projected/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-kube-api-access-xssh5\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.751970 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " 
pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.751994 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.752031 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.752385 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.752628 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.752782 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.753239 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.753336 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-config-data\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.758156 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.759253 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc 
kubenswrapper[4835]: I0202 17:39:56.759959 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.774030 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xssh5\" (UniqueName: \"kubernetes.io/projected/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-kube-api-access-xssh5\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.781546 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " pod="openstack/tempest-tests-tempest" Feb 02 17:39:56 crc kubenswrapper[4835]: I0202 17:39:56.884304 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 02 17:39:57 crc kubenswrapper[4835]: I0202 17:39:57.371516 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Feb 02 17:39:57 crc kubenswrapper[4835]: I0202 17:39:57.848173 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"0cfd7d28-c17f-4035-bd42-89b10e3c60eb","Type":"ContainerStarted","Data":"6e464d6c4de3bf6231156f09bce6c58e3818a9387ba4a690d707c21effcab8f8"} Feb 02 17:40:32 crc kubenswrapper[4835]: E0202 17:40:32.546332 4835 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Feb 02 17:40:32 crc kubenswrapper[4835]: E0202 17:40:32.547257 4835 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xssh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(0cfd7d28-c17f-4035-bd42-89b10e3c60eb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 17:40:32 crc kubenswrapper[4835]: E0202 17:40:32.548515 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="0cfd7d28-c17f-4035-bd42-89b10e3c60eb" Feb 02 17:40:33 crc kubenswrapper[4835]: E0202 17:40:33.213900 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="0cfd7d28-c17f-4035-bd42-89b10e3c60eb" Feb 02 17:40:47 crc kubenswrapper[4835]: I0202 17:40:47.192219 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:40:47 crc kubenswrapper[4835]: I0202 17:40:47.877329 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Feb 02 17:40:49 crc kubenswrapper[4835]: I0202 17:40:49.404068 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"0cfd7d28-c17f-4035-bd42-89b10e3c60eb","Type":"ContainerStarted","Data":"7db1f086ff62eb0e2c7cbb2960d3aa113927d0eac76c4774620f36beb67cae00"} Feb 02 17:40:49 crc kubenswrapper[4835]: I0202 17:40:49.424304 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.925114979 podStartE2EDuration="54.424289022s" podCreationTimestamp="2026-02-02 17:39:55 +0000 UTC" firstStartedPulling="2026-02-02 17:39:57.37568583 +0000 UTC m=+2988.997289910" lastFinishedPulling="2026-02-02 17:40:47.874859873 +0000 UTC m=+3039.496463953" observedRunningTime="2026-02-02 17:40:49.417533241 +0000 UTC m=+3041.039137321" watchObservedRunningTime="2026-02-02 17:40:49.424289022 +0000 UTC m=+3041.045893102" Feb 02 17:41:14 crc kubenswrapper[4835]: I0202 17:41:14.870604 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:41:14 crc kubenswrapper[4835]: I0202 17:41:14.871174 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:41:44 crc kubenswrapper[4835]: I0202 17:41:44.870866 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:41:44 crc kubenswrapper[4835]: I0202 17:41:44.871324 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:42:14 crc kubenswrapper[4835]: I0202 17:42:14.870311 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" start-of-body= Feb 02 17:42:14 crc kubenswrapper[4835]: I0202 17:42:14.870901 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:42:14 crc kubenswrapper[4835]: I0202 17:42:14.870983 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:42:14 crc kubenswrapper[4835]: I0202 17:42:14.872258 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:42:14 crc kubenswrapper[4835]: I0202 17:42:14.872415 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" gracePeriod=600 Feb 02 17:42:15 crc kubenswrapper[4835]: E0202 17:42:15.022756 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:42:15 crc kubenswrapper[4835]: I0202 17:42:15.250496 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b"} Feb 02 17:42:15 crc kubenswrapper[4835]: I0202 17:42:15.250908 4835 scope.go:117] "RemoveContainer" containerID="8c0c1dd28e739785aa59f1b10ef0e393360cdd138b956085fece6a9d4036c389" Feb 02 17:42:15 crc kubenswrapper[4835]: I0202 17:42:15.250438 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" exitCode=0 Feb 02 17:42:15 crc kubenswrapper[4835]: I0202 17:42:15.251752 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:42:15 crc kubenswrapper[4835]: E0202 17:42:15.252239 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:42:30 crc kubenswrapper[4835]: I0202 17:42:30.193690 4835 scope.go:117] "RemoveContainer" 
containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:42:30 crc kubenswrapper[4835]: E0202 17:42:30.194424 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:42:43 crc kubenswrapper[4835]: I0202 17:42:43.189394 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:42:43 crc kubenswrapper[4835]: E0202 17:42:43.190318 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:42:57 crc kubenswrapper[4835]: I0202 17:42:57.189386 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:42:57 crc kubenswrapper[4835]: E0202 17:42:57.190376 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:43:08 crc kubenswrapper[4835]: I0202 17:43:08.189682 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:43:08 crc kubenswrapper[4835]: E0202 17:43:08.190531 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:43:19 crc kubenswrapper[4835]: I0202 17:43:19.198351 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:43:19 crc kubenswrapper[4835]: E0202 17:43:19.199296 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:43:34 crc kubenswrapper[4835]: I0202 17:43:34.188613 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:43:34 crc kubenswrapper[4835]: E0202 17:43:34.189386 4835 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:43:46 crc kubenswrapper[4835]: I0202 17:43:46.188611 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:43:46 crc kubenswrapper[4835]: E0202 17:43:46.189257 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:43:58 crc kubenswrapper[4835]: I0202 17:43:58.188475 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:43:58 crc kubenswrapper[4835]: E0202 17:43:58.189248 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:44:03 crc kubenswrapper[4835]: I0202 17:44:03.734503 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vr6w7"] Feb 02 17:44:03 crc kubenswrapper[4835]: I0202 17:44:03.739926 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:03 crc kubenswrapper[4835]: I0202 17:44:03.768265 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vr6w7"] Feb 02 17:44:03 crc kubenswrapper[4835]: I0202 17:44:03.897458 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-utilities\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:03 crc kubenswrapper[4835]: I0202 17:44:03.897962 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv677\" (UniqueName: \"kubernetes.io/projected/28a497a6-0b68-4fb2-881a-a0cdab18185b-kube-api-access-sv677\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:03 crc kubenswrapper[4835]: I0202 17:44:03.898094 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-catalog-content\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:04 crc kubenswrapper[4835]: I0202 17:44:04.000056 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv677\" (UniqueName: \"kubernetes.io/projected/28a497a6-0b68-4fb2-881a-a0cdab18185b-kube-api-access-sv677\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:04 crc kubenswrapper[4835]: I0202 17:44:04.000107 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-catalog-content\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:04 crc kubenswrapper[4835]: I0202 17:44:04.000200 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-utilities\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:04 crc kubenswrapper[4835]: I0202 17:44:04.000736 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-catalog-content\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:04 crc kubenswrapper[4835]: I0202 17:44:04.000753 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-utilities\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:04 crc kubenswrapper[4835]: I0202 17:44:04.018737 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-sv677\" (UniqueName: \"kubernetes.io/projected/28a497a6-0b68-4fb2-881a-a0cdab18185b-kube-api-access-sv677\") pod \"redhat-operators-vr6w7\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:04 crc kubenswrapper[4835]: I0202 17:44:04.119011 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:04 crc kubenswrapper[4835]: I0202 17:44:04.649046 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vr6w7"] Feb 02 17:44:05 crc kubenswrapper[4835]: I0202 17:44:05.326654 4835 generic.go:334] "Generic (PLEG): container finished" podID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerID="536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e" exitCode=0 Feb 02 17:44:05 crc kubenswrapper[4835]: I0202 17:44:05.326708 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vr6w7" event={"ID":"28a497a6-0b68-4fb2-881a-a0cdab18185b","Type":"ContainerDied","Data":"536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e"} Feb 02 17:44:05 crc kubenswrapper[4835]: I0202 17:44:05.326916 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vr6w7" event={"ID":"28a497a6-0b68-4fb2-881a-a0cdab18185b","Type":"ContainerStarted","Data":"2d07bd970b319b0db719794f2cffb4f7520501c354466df422cc8edcef01dd27"} Feb 02 17:44:06 crc kubenswrapper[4835]: I0202 17:44:06.339115 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vr6w7" event={"ID":"28a497a6-0b68-4fb2-881a-a0cdab18185b","Type":"ContainerStarted","Data":"71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632"} Feb 02 17:44:08 crc kubenswrapper[4835]: I0202 17:44:08.359397 4835 generic.go:334] "Generic (PLEG): container finished" podID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerID="71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632" exitCode=0 Feb 02 17:44:08 crc kubenswrapper[4835]: I0202 17:44:08.359573 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vr6w7" event={"ID":"28a497a6-0b68-4fb2-881a-a0cdab18185b","Type":"ContainerDied","Data":"71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632"} Feb 02 17:44:09 crc kubenswrapper[4835]: I0202 17:44:09.369795 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vr6w7" event={"ID":"28a497a6-0b68-4fb2-881a-a0cdab18185b","Type":"ContainerStarted","Data":"85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729"} Feb 02 17:44:09 crc kubenswrapper[4835]: I0202 17:44:09.398022 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vr6w7" podStartSLOduration=2.858051395 podStartE2EDuration="6.397999005s" podCreationTimestamp="2026-02-02 17:44:03 +0000 UTC" firstStartedPulling="2026-02-02 17:44:05.330580142 +0000 UTC m=+3236.952184222" lastFinishedPulling="2026-02-02 17:44:08.870527712 +0000 UTC m=+3240.492131832" observedRunningTime="2026-02-02 17:44:09.392914111 +0000 UTC m=+3241.014518191" watchObservedRunningTime="2026-02-02 17:44:09.397999005 +0000 UTC m=+3241.019603095" Feb 02 17:44:11 crc kubenswrapper[4835]: I0202 17:44:11.188716 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 
17:44:11 crc kubenswrapper[4835]: E0202 17:44:11.189198 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:44:14 crc kubenswrapper[4835]: I0202 17:44:14.120309 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:14 crc kubenswrapper[4835]: I0202 17:44:14.120893 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:14 crc kubenswrapper[4835]: I0202 17:44:14.143819 4835 scope.go:117] "RemoveContainer" containerID="586adfcb28f1300cc5d1b9ca3414d9043ae073502e1c357412494828fd854505" Feb 02 17:44:14 crc kubenswrapper[4835]: I0202 17:44:14.173847 4835 scope.go:117] "RemoveContainer" containerID="b4a83f70f2990b82ef14ada3b958133f1bf6cc74c4248b3aaa4bf89501b57bef" Feb 02 17:44:15 crc kubenswrapper[4835]: I0202 17:44:15.164063 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vr6w7" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="registry-server" probeResult="failure" output=< Feb 02 17:44:15 crc kubenswrapper[4835]: timeout: failed to connect service ":50051" within 1s Feb 02 17:44:15 crc kubenswrapper[4835]: > Feb 02 17:44:24 crc kubenswrapper[4835]: I0202 17:44:24.182496 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:24 crc kubenswrapper[4835]: I0202 17:44:24.264126 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:24 crc kubenswrapper[4835]: I0202 17:44:24.456062 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vr6w7"] Feb 02 17:44:25 crc kubenswrapper[4835]: I0202 17:44:25.509622 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vr6w7" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="registry-server" containerID="cri-o://85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729" gracePeriod=2 Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.086382 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.167338 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-utilities\") pod \"28a497a6-0b68-4fb2-881a-a0cdab18185b\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.167387 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sv677\" (UniqueName: \"kubernetes.io/projected/28a497a6-0b68-4fb2-881a-a0cdab18185b-kube-api-access-sv677\") pod \"28a497a6-0b68-4fb2-881a-a0cdab18185b\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.167502 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-catalog-content\") pod \"28a497a6-0b68-4fb2-881a-a0cdab18185b\" (UID: \"28a497a6-0b68-4fb2-881a-a0cdab18185b\") " Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.168771 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-utilities" (OuterVolumeSpecName: "utilities") pod "28a497a6-0b68-4fb2-881a-a0cdab18185b" (UID: "28a497a6-0b68-4fb2-881a-a0cdab18185b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.177753 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28a497a6-0b68-4fb2-881a-a0cdab18185b-kube-api-access-sv677" (OuterVolumeSpecName: "kube-api-access-sv677") pod "28a497a6-0b68-4fb2-881a-a0cdab18185b" (UID: "28a497a6-0b68-4fb2-881a-a0cdab18185b"). InnerVolumeSpecName "kube-api-access-sv677". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.188934 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:44:26 crc kubenswrapper[4835]: E0202 17:44:26.189236 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.269395 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.269428 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sv677\" (UniqueName: \"kubernetes.io/projected/28a497a6-0b68-4fb2-881a-a0cdab18185b-kube-api-access-sv677\") on node \"crc\" DevicePath \"\"" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.309643 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28a497a6-0b68-4fb2-881a-a0cdab18185b" (UID: "28a497a6-0b68-4fb2-881a-a0cdab18185b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.371550 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28a497a6-0b68-4fb2-881a-a0cdab18185b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.521502 4835 generic.go:334] "Generic (PLEG): container finished" podID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerID="85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729" exitCode=0 Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.521553 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vr6w7" event={"ID":"28a497a6-0b68-4fb2-881a-a0cdab18185b","Type":"ContainerDied","Data":"85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729"} Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.521583 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vr6w7" event={"ID":"28a497a6-0b68-4fb2-881a-a0cdab18185b","Type":"ContainerDied","Data":"2d07bd970b319b0db719794f2cffb4f7520501c354466df422cc8edcef01dd27"} Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.521602 4835 scope.go:117] "RemoveContainer" containerID="85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.521734 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vr6w7" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.562844 4835 scope.go:117] "RemoveContainer" containerID="71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.567379 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vr6w7"] Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.575691 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vr6w7"] Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.583477 4835 scope.go:117] "RemoveContainer" containerID="536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.651792 4835 scope.go:117] "RemoveContainer" containerID="85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729" Feb 02 17:44:26 crc kubenswrapper[4835]: E0202 17:44:26.652341 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729\": container with ID starting with 85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729 not found: ID does not exist" containerID="85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.652390 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729"} err="failed to get container status \"85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729\": rpc error: code = NotFound desc = could not find container \"85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729\": container with ID starting with 85950d4841b5fd29468e2270ec9e0868a9258d6382d649df94ecd31cc4a24729 not found: ID does not exist" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.652421 4835 scope.go:117] "RemoveContainer" containerID="71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632" Feb 02 17:44:26 crc kubenswrapper[4835]: E0202 17:44:26.652909 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632\": container with ID starting with 71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632 not found: ID does not exist" containerID="71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.652957 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632"} err="failed to get container status \"71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632\": rpc error: code = NotFound desc = could not find container \"71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632\": container with ID starting with 71e0247340f76663043336d5723ff2e26d22756344fd6ed23dfe21f9b4285632 not found: ID does not exist" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.652987 4835 scope.go:117] "RemoveContainer" containerID="536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e" Feb 02 17:44:26 crc kubenswrapper[4835]: E0202 17:44:26.653304 4835 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e\": container with ID starting with 536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e not found: ID does not exist" containerID="536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e" Feb 02 17:44:26 crc kubenswrapper[4835]: I0202 17:44:26.653335 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e"} err="failed to get container status \"536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e\": rpc error: code = NotFound desc = could not find container \"536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e\": container with ID starting with 536bc814e206bbcffb48a9ab36689cd2c4cad260f2988dbdede4d2e699a8352e not found: ID does not exist" Feb 02 17:44:27 crc kubenswrapper[4835]: I0202 17:44:27.207657 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" path="/var/lib/kubelet/pods/28a497a6-0b68-4fb2-881a-a0cdab18185b/volumes" Feb 02 17:44:41 crc kubenswrapper[4835]: I0202 17:44:41.189132 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:44:41 crc kubenswrapper[4835]: E0202 17:44:41.189788 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:44:52 crc kubenswrapper[4835]: I0202 17:44:52.188932 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:44:52 crc kubenswrapper[4835]: E0202 17:44:52.190146 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.151942 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k"] Feb 02 17:45:00 crc kubenswrapper[4835]: E0202 17:45:00.152865 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="extract-utilities" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.152881 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="extract-utilities" Feb 02 17:45:00 crc kubenswrapper[4835]: E0202 17:45:00.152898 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="registry-server" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.152906 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="registry-server" Feb 02 17:45:00 crc 
kubenswrapper[4835]: E0202 17:45:00.152931 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="extract-content" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.152940 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="extract-content" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.153233 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="28a497a6-0b68-4fb2-881a-a0cdab18185b" containerName="registry-server" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.154023 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.156909 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.157179 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.210793 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k"] Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.260470 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b996e916-012e-4430-965b-787910535ac0-config-volume\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.260548 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq6z7\" (UniqueName: \"kubernetes.io/projected/b996e916-012e-4430-965b-787910535ac0-kube-api-access-hq6z7\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.260577 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b996e916-012e-4430-965b-787910535ac0-secret-volume\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.362389 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b996e916-012e-4430-965b-787910535ac0-config-volume\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.362462 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq6z7\" (UniqueName: \"kubernetes.io/projected/b996e916-012e-4430-965b-787910535ac0-kube-api-access-hq6z7\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.362497 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b996e916-012e-4430-965b-787910535ac0-secret-volume\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.363389 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b996e916-012e-4430-965b-787910535ac0-config-volume\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.372149 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b996e916-012e-4430-965b-787910535ac0-secret-volume\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.410364 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq6z7\" (UniqueName: \"kubernetes.io/projected/b996e916-012e-4430-965b-787910535ac0-kube-api-access-hq6z7\") pod \"collect-profiles-29500905-lb99k\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.518441 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:00 crc kubenswrapper[4835]: I0202 17:45:00.964806 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k"] Feb 02 17:45:01 crc kubenswrapper[4835]: I0202 17:45:01.840420 4835 generic.go:334] "Generic (PLEG): container finished" podID="b996e916-012e-4430-965b-787910535ac0" containerID="8ce997de04b2c35d7abf86052070140bfeb5e23603673f8c0ea3f7083a266cf7" exitCode=0 Feb 02 17:45:01 crc kubenswrapper[4835]: I0202 17:45:01.840503 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" event={"ID":"b996e916-012e-4430-965b-787910535ac0","Type":"ContainerDied","Data":"8ce997de04b2c35d7abf86052070140bfeb5e23603673f8c0ea3f7083a266cf7"} Feb 02 17:45:01 crc kubenswrapper[4835]: I0202 17:45:01.840898 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" event={"ID":"b996e916-012e-4430-965b-787910535ac0","Type":"ContainerStarted","Data":"c7755dc6641612056f38ad5c76863e7d0dc1fcbf6ac127d214436a7bb1c5409b"} Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.327760 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.435101 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b996e916-012e-4430-965b-787910535ac0-secret-volume\") pod \"b996e916-012e-4430-965b-787910535ac0\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.435226 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b996e916-012e-4430-965b-787910535ac0-config-volume\") pod \"b996e916-012e-4430-965b-787910535ac0\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.435429 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hq6z7\" (UniqueName: \"kubernetes.io/projected/b996e916-012e-4430-965b-787910535ac0-kube-api-access-hq6z7\") pod \"b996e916-012e-4430-965b-787910535ac0\" (UID: \"b996e916-012e-4430-965b-787910535ac0\") " Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.437780 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b996e916-012e-4430-965b-787910535ac0-config-volume" (OuterVolumeSpecName: "config-volume") pod "b996e916-012e-4430-965b-787910535ac0" (UID: "b996e916-012e-4430-965b-787910535ac0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.450444 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b996e916-012e-4430-965b-787910535ac0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b996e916-012e-4430-965b-787910535ac0" (UID: "b996e916-012e-4430-965b-787910535ac0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.450508 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b996e916-012e-4430-965b-787910535ac0-kube-api-access-hq6z7" (OuterVolumeSpecName: "kube-api-access-hq6z7") pod "b996e916-012e-4430-965b-787910535ac0" (UID: "b996e916-012e-4430-965b-787910535ac0"). InnerVolumeSpecName "kube-api-access-hq6z7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.537879 4835 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b996e916-012e-4430-965b-787910535ac0-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.537947 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hq6z7\" (UniqueName: \"kubernetes.io/projected/b996e916-012e-4430-965b-787910535ac0-kube-api-access-hq6z7\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.537963 4835 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b996e916-012e-4430-965b-787910535ac0-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.880894 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" event={"ID":"b996e916-012e-4430-965b-787910535ac0","Type":"ContainerDied","Data":"c7755dc6641612056f38ad5c76863e7d0dc1fcbf6ac127d214436a7bb1c5409b"} Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.881217 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7755dc6641612056f38ad5c76863e7d0dc1fcbf6ac127d214436a7bb1c5409b" Feb 02 17:45:03 crc kubenswrapper[4835]: I0202 17:45:03.881300 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500905-lb99k" Feb 02 17:45:04 crc kubenswrapper[4835]: I0202 17:45:04.413094 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5"] Feb 02 17:45:04 crc kubenswrapper[4835]: I0202 17:45:04.420372 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500860-d8hf5"] Feb 02 17:45:05 crc kubenswrapper[4835]: I0202 17:45:05.213970 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2" path="/var/lib/kubelet/pods/4c032d81-fc19-4a2b-a6ae-fdf9ab979cc2/volumes" Feb 02 17:45:07 crc kubenswrapper[4835]: I0202 17:45:07.189382 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:45:07 crc kubenswrapper[4835]: E0202 17:45:07.190698 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.087391 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ghnkc"] Feb 02 17:45:11 crc kubenswrapper[4835]: E0202 17:45:11.089538 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b996e916-012e-4430-965b-787910535ac0" containerName="collect-profiles" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.089654 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b996e916-012e-4430-965b-787910535ac0" containerName="collect-profiles" Feb 02 17:45:11 crc 
kubenswrapper[4835]: I0202 17:45:11.090005 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b996e916-012e-4430-965b-787910535ac0" containerName="collect-profiles" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.091891 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.103809 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ghnkc"] Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.213195 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-utilities\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.213335 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knj6t\" (UniqueName: \"kubernetes.io/projected/80c4f809-9fea-41d8-9730-df175bf8d670-kube-api-access-knj6t\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.213446 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-catalog-content\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.315494 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-catalog-content\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.315925 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-utilities\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.316043 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-catalog-content\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.316208 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knj6t\" (UniqueName: \"kubernetes.io/projected/80c4f809-9fea-41d8-9730-df175bf8d670-kube-api-access-knj6t\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.316411 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-utilities\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.344345 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knj6t\" (UniqueName: \"kubernetes.io/projected/80c4f809-9fea-41d8-9730-df175bf8d670-kube-api-access-knj6t\") pod \"certified-operators-ghnkc\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.419580 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:11 crc kubenswrapper[4835]: I0202 17:45:11.973594 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ghnkc"] Feb 02 17:45:12 crc kubenswrapper[4835]: I0202 17:45:12.016176 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghnkc" event={"ID":"80c4f809-9fea-41d8-9730-df175bf8d670","Type":"ContainerStarted","Data":"f86a15f299a407a5fae446386cb68beb7f2efe1f7771508d4a6288979a8061f0"} Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.027148 4835 generic.go:334] "Generic (PLEG): container finished" podID="80c4f809-9fea-41d8-9730-df175bf8d670" containerID="c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d" exitCode=0 Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.027322 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghnkc" event={"ID":"80c4f809-9fea-41d8-9730-df175bf8d670","Type":"ContainerDied","Data":"c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d"} Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.479632 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lbgmv"] Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.482120 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.491602 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lbgmv"] Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.564661 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv6q9\" (UniqueName: \"kubernetes.io/projected/790d3c5a-5437-4075-9279-117abd9030a5-kube-api-access-vv6q9\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.564836 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-utilities\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.564899 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-catalog-content\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.666883 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv6q9\" (UniqueName: \"kubernetes.io/projected/790d3c5a-5437-4075-9279-117abd9030a5-kube-api-access-vv6q9\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.667007 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-utilities\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.667055 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-catalog-content\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.667623 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-catalog-content\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.667705 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-utilities\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.686224 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vv6q9\" (UniqueName: \"kubernetes.io/projected/790d3c5a-5437-4075-9279-117abd9030a5-kube-api-access-vv6q9\") pod \"community-operators-lbgmv\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:13 crc kubenswrapper[4835]: I0202 17:45:13.800230 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:14 crc kubenswrapper[4835]: I0202 17:45:14.379459 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lbgmv"] Feb 02 17:45:14 crc kubenswrapper[4835]: I0202 17:45:14.415412 4835 scope.go:117] "RemoveContainer" containerID="ed4a7e5ef8c0975e1ef40e129f19928157c810d32a79cac717df18f6f4358aa5" Feb 02 17:45:15 crc kubenswrapper[4835]: I0202 17:45:15.066056 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbgmv" event={"ID":"790d3c5a-5437-4075-9279-117abd9030a5","Type":"ContainerStarted","Data":"74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad"} Feb 02 17:45:15 crc kubenswrapper[4835]: I0202 17:45:15.066463 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbgmv" event={"ID":"790d3c5a-5437-4075-9279-117abd9030a5","Type":"ContainerStarted","Data":"f4130ce184dfeb81ad2508840372a1054e42905694d8bc346187f187c5e22b37"} Feb 02 17:45:15 crc kubenswrapper[4835]: I0202 17:45:15.068856 4835 generic.go:334] "Generic (PLEG): container finished" podID="80c4f809-9fea-41d8-9730-df175bf8d670" containerID="373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4" exitCode=0 Feb 02 17:45:15 crc kubenswrapper[4835]: I0202 17:45:15.068905 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghnkc" event={"ID":"80c4f809-9fea-41d8-9730-df175bf8d670","Type":"ContainerDied","Data":"373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4"} Feb 02 17:45:16 crc kubenswrapper[4835]: I0202 17:45:16.080389 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghnkc" event={"ID":"80c4f809-9fea-41d8-9730-df175bf8d670","Type":"ContainerStarted","Data":"57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0"} Feb 02 17:45:16 crc kubenswrapper[4835]: I0202 17:45:16.082169 4835 generic.go:334] "Generic (PLEG): container finished" podID="790d3c5a-5437-4075-9279-117abd9030a5" containerID="74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad" exitCode=0 Feb 02 17:45:16 crc kubenswrapper[4835]: I0202 17:45:16.082205 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbgmv" event={"ID":"790d3c5a-5437-4075-9279-117abd9030a5","Type":"ContainerDied","Data":"74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad"} Feb 02 17:45:16 crc kubenswrapper[4835]: I0202 17:45:16.109188 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ghnkc" podStartSLOduration=2.6362276270000002 podStartE2EDuration="5.10916757s" podCreationTimestamp="2026-02-02 17:45:11 +0000 UTC" firstStartedPulling="2026-02-02 17:45:13.030461423 +0000 UTC m=+3304.652065503" lastFinishedPulling="2026-02-02 17:45:15.503401366 +0000 UTC m=+3307.125005446" observedRunningTime="2026-02-02 17:45:16.099919869 +0000 UTC m=+3307.721523969" 
watchObservedRunningTime="2026-02-02 17:45:16.10916757 +0000 UTC m=+3307.730771660" Feb 02 17:45:18 crc kubenswrapper[4835]: I0202 17:45:18.098453 4835 generic.go:334] "Generic (PLEG): container finished" podID="790d3c5a-5437-4075-9279-117abd9030a5" containerID="a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b" exitCode=0 Feb 02 17:45:18 crc kubenswrapper[4835]: I0202 17:45:18.098520 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbgmv" event={"ID":"790d3c5a-5437-4075-9279-117abd9030a5","Type":"ContainerDied","Data":"a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b"} Feb 02 17:45:18 crc kubenswrapper[4835]: I0202 17:45:18.188340 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:45:18 crc kubenswrapper[4835]: E0202 17:45:18.189317 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:45:19 crc kubenswrapper[4835]: I0202 17:45:19.110426 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbgmv" event={"ID":"790d3c5a-5437-4075-9279-117abd9030a5","Type":"ContainerStarted","Data":"575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d"} Feb 02 17:45:19 crc kubenswrapper[4835]: I0202 17:45:19.132159 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lbgmv" podStartSLOduration=3.63229159 podStartE2EDuration="6.132141703s" podCreationTimestamp="2026-02-02 17:45:13 +0000 UTC" firstStartedPulling="2026-02-02 17:45:16.08401778 +0000 UTC m=+3307.705621860" lastFinishedPulling="2026-02-02 17:45:18.583867903 +0000 UTC m=+3310.205471973" observedRunningTime="2026-02-02 17:45:19.12673791 +0000 UTC m=+3310.748342000" watchObservedRunningTime="2026-02-02 17:45:19.132141703 +0000 UTC m=+3310.753745783" Feb 02 17:45:21 crc kubenswrapper[4835]: I0202 17:45:21.419881 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:21 crc kubenswrapper[4835]: I0202 17:45:21.420381 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:21 crc kubenswrapper[4835]: I0202 17:45:21.470034 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:22 crc kubenswrapper[4835]: I0202 17:45:22.186851 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:22 crc kubenswrapper[4835]: I0202 17:45:22.674933 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ghnkc"] Feb 02 17:45:23 crc kubenswrapper[4835]: I0202 17:45:23.801914 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:23 crc kubenswrapper[4835]: I0202 17:45:23.802141 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:23 crc kubenswrapper[4835]: I0202 17:45:23.857409 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.148870 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ghnkc" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" containerName="registry-server" containerID="cri-o://57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0" gracePeriod=2 Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.197134 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.820751 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.913190 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-catalog-content\") pod \"80c4f809-9fea-41d8-9730-df175bf8d670\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.913249 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knj6t\" (UniqueName: \"kubernetes.io/projected/80c4f809-9fea-41d8-9730-df175bf8d670-kube-api-access-knj6t\") pod \"80c4f809-9fea-41d8-9730-df175bf8d670\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.913545 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-utilities\") pod \"80c4f809-9fea-41d8-9730-df175bf8d670\" (UID: \"80c4f809-9fea-41d8-9730-df175bf8d670\") " Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.914558 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-utilities" (OuterVolumeSpecName: "utilities") pod "80c4f809-9fea-41d8-9730-df175bf8d670" (UID: "80c4f809-9fea-41d8-9730-df175bf8d670"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.920621 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80c4f809-9fea-41d8-9730-df175bf8d670-kube-api-access-knj6t" (OuterVolumeSpecName: "kube-api-access-knj6t") pod "80c4f809-9fea-41d8-9730-df175bf8d670" (UID: "80c4f809-9fea-41d8-9730-df175bf8d670"). InnerVolumeSpecName "kube-api-access-knj6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:45:24 crc kubenswrapper[4835]: I0202 17:45:24.966834 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "80c4f809-9fea-41d8-9730-df175bf8d670" (UID: "80c4f809-9fea-41d8-9730-df175bf8d670"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.015692 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knj6t\" (UniqueName: \"kubernetes.io/projected/80c4f809-9fea-41d8-9730-df175bf8d670-kube-api-access-knj6t\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.015730 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.015739 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80c4f809-9fea-41d8-9730-df175bf8d670-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.163561 4835 generic.go:334] "Generic (PLEG): container finished" podID="80c4f809-9fea-41d8-9730-df175bf8d670" containerID="57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0" exitCode=0 Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.163710 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ghnkc" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.163682 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghnkc" event={"ID":"80c4f809-9fea-41d8-9730-df175bf8d670","Type":"ContainerDied","Data":"57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0"} Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.163810 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ghnkc" event={"ID":"80c4f809-9fea-41d8-9730-df175bf8d670","Type":"ContainerDied","Data":"f86a15f299a407a5fae446386cb68beb7f2efe1f7771508d4a6288979a8061f0"} Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.163852 4835 scope.go:117] "RemoveContainer" containerID="57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.201907 4835 scope.go:117] "RemoveContainer" containerID="373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.216854 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ghnkc"] Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.226504 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ghnkc"] Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.234517 4835 scope.go:117] "RemoveContainer" containerID="c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.281683 4835 scope.go:117] "RemoveContainer" containerID="57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0" Feb 02 17:45:25 crc kubenswrapper[4835]: E0202 17:45:25.282085 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0\": container with ID starting with 57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0 not found: ID does not exist" containerID="57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.282141 
4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0"} err="failed to get container status \"57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0\": rpc error: code = NotFound desc = could not find container \"57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0\": container with ID starting with 57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0 not found: ID does not exist" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.282173 4835 scope.go:117] "RemoveContainer" containerID="373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4" Feb 02 17:45:25 crc kubenswrapper[4835]: E0202 17:45:25.282434 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4\": container with ID starting with 373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4 not found: ID does not exist" containerID="373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.283032 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4"} err="failed to get container status \"373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4\": rpc error: code = NotFound desc = could not find container \"373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4\": container with ID starting with 373f2bac75f42452c18d6552570920d1902e53035aa1810a91e287c672fef9c4 not found: ID does not exist" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.283052 4835 scope.go:117] "RemoveContainer" containerID="c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d" Feb 02 17:45:25 crc kubenswrapper[4835]: E0202 17:45:25.283298 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d\": container with ID starting with c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d not found: ID does not exist" containerID="c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.283325 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d"} err="failed to get container status \"c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d\": rpc error: code = NotFound desc = could not find container \"c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d\": container with ID starting with c9852188b93ea51c1e02ccc8cabb3c489d87919c9e70d927d2931a78c7d6281d not found: ID does not exist" Feb 02 17:45:25 crc kubenswrapper[4835]: I0202 17:45:25.289413 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lbgmv"] Feb 02 17:45:26 crc kubenswrapper[4835]: I0202 17:45:26.171418 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lbgmv" podUID="790d3c5a-5437-4075-9279-117abd9030a5" containerName="registry-server" containerID="cri-o://575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d" gracePeriod=2 Feb 02 
17:45:26 crc kubenswrapper[4835]: I0202 17:45:26.807322 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:26 crc kubenswrapper[4835]: I0202 17:45:26.950592 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-utilities\") pod \"790d3c5a-5437-4075-9279-117abd9030a5\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " Feb 02 17:45:26 crc kubenswrapper[4835]: I0202 17:45:26.950676 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv6q9\" (UniqueName: \"kubernetes.io/projected/790d3c5a-5437-4075-9279-117abd9030a5-kube-api-access-vv6q9\") pod \"790d3c5a-5437-4075-9279-117abd9030a5\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " Feb 02 17:45:26 crc kubenswrapper[4835]: I0202 17:45:26.950845 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-catalog-content\") pod \"790d3c5a-5437-4075-9279-117abd9030a5\" (UID: \"790d3c5a-5437-4075-9279-117abd9030a5\") " Feb 02 17:45:26 crc kubenswrapper[4835]: I0202 17:45:26.951439 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-utilities" (OuterVolumeSpecName: "utilities") pod "790d3c5a-5437-4075-9279-117abd9030a5" (UID: "790d3c5a-5437-4075-9279-117abd9030a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:45:26 crc kubenswrapper[4835]: I0202 17:45:26.956458 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/790d3c5a-5437-4075-9279-117abd9030a5-kube-api-access-vv6q9" (OuterVolumeSpecName: "kube-api-access-vv6q9") pod "790d3c5a-5437-4075-9279-117abd9030a5" (UID: "790d3c5a-5437-4075-9279-117abd9030a5"). InnerVolumeSpecName "kube-api-access-vv6q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.003293 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "790d3c5a-5437-4075-9279-117abd9030a5" (UID: "790d3c5a-5437-4075-9279-117abd9030a5"). InnerVolumeSpecName "catalog-content". 
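Note: the RemoveContainer / "DeleteContainer returned error ... NotFound" pairs above are cleanup retries for container IDs the runtime has already forgotten, so the errors are effectively benign. A minimal, hypothetical sketch of that idempotent-delete idea (the deleteContainer stub and the string match on the gRPC NotFound text are assumptions, not CRI client code):

// idempotent_delete.go -- illustrative only: treat "already gone" as success during cleanup.
package main

import (
	"errors"
	"fmt"
	"strings"
)

// deleteContainer is a stand-in for a runtime call; here it always reports NotFound.
func deleteContainer(id string) error {
	return fmt.Errorf("rpc error: code = NotFound desc = could not find container %q", id)
}

// removeIfPresent ignores NotFound-style errors so repeated cleanup passes stay idempotent.
func removeIfPresent(id string) error {
	err := deleteContainer(id)
	if err == nil || strings.Contains(err.Error(), "code = NotFound") {
		return nil // already removed: nothing left to do
	}
	return errors.New("cleanup failed: " + err.Error())
}

func main() {
	fmt.Println(removeIfPresent("57794dde0dcc8feb243761ad67b45ef954a0af4d05dcdd9ebf8f3ed020229df0")) // <nil>
}
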
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.073551 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.073600 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790d3c5a-5437-4075-9279-117abd9030a5-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.073615 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv6q9\" (UniqueName: \"kubernetes.io/projected/790d3c5a-5437-4075-9279-117abd9030a5-kube-api-access-vv6q9\") on node \"crc\" DevicePath \"\"" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.181223 4835 generic.go:334] "Generic (PLEG): container finished" podID="790d3c5a-5437-4075-9279-117abd9030a5" containerID="575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d" exitCode=0 Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.181309 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbgmv" event={"ID":"790d3c5a-5437-4075-9279-117abd9030a5","Type":"ContainerDied","Data":"575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d"} Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.181356 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbgmv" event={"ID":"790d3c5a-5437-4075-9279-117abd9030a5","Type":"ContainerDied","Data":"f4130ce184dfeb81ad2508840372a1054e42905694d8bc346187f187c5e22b37"} Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.181350 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lbgmv" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.181372 4835 scope.go:117] "RemoveContainer" containerID="575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.202080 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" path="/var/lib/kubelet/pods/80c4f809-9fea-41d8-9730-df175bf8d670/volumes" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.215872 4835 scope.go:117] "RemoveContainer" containerID="a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.220173 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lbgmv"] Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.227750 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lbgmv"] Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.245360 4835 scope.go:117] "RemoveContainer" containerID="74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.279346 4835 scope.go:117] "RemoveContainer" containerID="575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d" Feb 02 17:45:27 crc kubenswrapper[4835]: E0202 17:45:27.279763 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d\": container with ID starting with 575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d not found: ID does not exist" containerID="575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.279800 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d"} err="failed to get container status \"575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d\": rpc error: code = NotFound desc = could not find container \"575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d\": container with ID starting with 575741ec1bdd10ac24faf0b7c0317d1c579f92fdd43d46beae9fded63fdcaa1d not found: ID does not exist" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.279825 4835 scope.go:117] "RemoveContainer" containerID="a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b" Feb 02 17:45:27 crc kubenswrapper[4835]: E0202 17:45:27.280974 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b\": container with ID starting with a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b not found: ID does not exist" containerID="a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.280995 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b"} err="failed to get container status \"a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b\": rpc error: code = NotFound desc = could not find container 
\"a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b\": container with ID starting with a5623f1b1452425e72e199dc67bd58f06bdb38bf307a10d8348d6b73d9a8117b not found: ID does not exist" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.281010 4835 scope.go:117] "RemoveContainer" containerID="74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad" Feb 02 17:45:27 crc kubenswrapper[4835]: E0202 17:45:27.283461 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad\": container with ID starting with 74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad not found: ID does not exist" containerID="74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad" Feb 02 17:45:27 crc kubenswrapper[4835]: I0202 17:45:27.283593 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad"} err="failed to get container status \"74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad\": rpc error: code = NotFound desc = could not find container \"74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad\": container with ID starting with 74f39d1f05a5fd755953247357d0c2e640f37814c1695f227c1a3463996ee4ad not found: ID does not exist" Feb 02 17:45:29 crc kubenswrapper[4835]: I0202 17:45:29.203195 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="790d3c5a-5437-4075-9279-117abd9030a5" path="/var/lib/kubelet/pods/790d3c5a-5437-4075-9279-117abd9030a5/volumes" Feb 02 17:45:32 crc kubenswrapper[4835]: I0202 17:45:32.188500 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:45:32 crc kubenswrapper[4835]: E0202 17:45:32.188962 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:45:43 crc kubenswrapper[4835]: I0202 17:45:43.188450 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:45:43 crc kubenswrapper[4835]: E0202 17:45:43.189153 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.188842 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:45:54 crc kubenswrapper[4835]: E0202 17:45:54.189701 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.195502 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w27pk"] Feb 02 17:45:54 crc kubenswrapper[4835]: E0202 17:45:54.195916 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" containerName="extract-utilities" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.195936 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" containerName="extract-utilities" Feb 02 17:45:54 crc kubenswrapper[4835]: E0202 17:45:54.195949 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" containerName="registry-server" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.195955 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" containerName="registry-server" Feb 02 17:45:54 crc kubenswrapper[4835]: E0202 17:45:54.195986 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790d3c5a-5437-4075-9279-117abd9030a5" containerName="extract-utilities" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.195993 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="790d3c5a-5437-4075-9279-117abd9030a5" containerName="extract-utilities" Feb 02 17:45:54 crc kubenswrapper[4835]: E0202 17:45:54.196015 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790d3c5a-5437-4075-9279-117abd9030a5" containerName="extract-content" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.196020 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="790d3c5a-5437-4075-9279-117abd9030a5" containerName="extract-content" Feb 02 17:45:54 crc kubenswrapper[4835]: E0202 17:45:54.196042 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" containerName="extract-content" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.196049 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" containerName="extract-content" Feb 02 17:45:54 crc kubenswrapper[4835]: E0202 17:45:54.196065 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790d3c5a-5437-4075-9279-117abd9030a5" containerName="registry-server" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.196071 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="790d3c5a-5437-4075-9279-117abd9030a5" containerName="registry-server" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.196250 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="790d3c5a-5437-4075-9279-117abd9030a5" containerName="registry-server" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.198566 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="80c4f809-9fea-41d8-9730-df175bf8d670" containerName="registry-server" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.199936 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.209029 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qqbp\" (UniqueName: \"kubernetes.io/projected/010af145-f70a-49a8-9e4d-bffaa05c95ce-kube-api-access-9qqbp\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.209118 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-catalog-content\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.209293 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-utilities\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.220231 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w27pk"] Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.311203 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qqbp\" (UniqueName: \"kubernetes.io/projected/010af145-f70a-49a8-9e4d-bffaa05c95ce-kube-api-access-9qqbp\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.311599 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-catalog-content\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.311725 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-utilities\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.312138 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-catalog-content\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.312475 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-utilities\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.348305 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9qqbp\" (UniqueName: \"kubernetes.io/projected/010af145-f70a-49a8-9e4d-bffaa05c95ce-kube-api-access-9qqbp\") pod \"redhat-marketplace-w27pk\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:54 crc kubenswrapper[4835]: I0202 17:45:54.542198 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:45:55 crc kubenswrapper[4835]: I0202 17:45:55.059175 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w27pk"] Feb 02 17:45:55 crc kubenswrapper[4835]: I0202 17:45:55.447171 4835 generic.go:334] "Generic (PLEG): container finished" podID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerID="10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5" exitCode=0 Feb 02 17:45:55 crc kubenswrapper[4835]: I0202 17:45:55.447256 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w27pk" event={"ID":"010af145-f70a-49a8-9e4d-bffaa05c95ce","Type":"ContainerDied","Data":"10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5"} Feb 02 17:45:55 crc kubenswrapper[4835]: I0202 17:45:55.447493 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w27pk" event={"ID":"010af145-f70a-49a8-9e4d-bffaa05c95ce","Type":"ContainerStarted","Data":"a53997a54029b95ec173cbda25cc44fc913777c112e12d61f962ec22d4807159"} Feb 02 17:45:55 crc kubenswrapper[4835]: I0202 17:45:55.449407 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:45:56 crc kubenswrapper[4835]: I0202 17:45:56.458243 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w27pk" event={"ID":"010af145-f70a-49a8-9e4d-bffaa05c95ce","Type":"ContainerStarted","Data":"9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d"} Feb 02 17:45:57 crc kubenswrapper[4835]: I0202 17:45:57.471057 4835 generic.go:334] "Generic (PLEG): container finished" podID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerID="9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d" exitCode=0 Feb 02 17:45:57 crc kubenswrapper[4835]: I0202 17:45:57.471248 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w27pk" event={"ID":"010af145-f70a-49a8-9e4d-bffaa05c95ce","Type":"ContainerDied","Data":"9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d"} Feb 02 17:45:58 crc kubenswrapper[4835]: I0202 17:45:58.481898 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w27pk" event={"ID":"010af145-f70a-49a8-9e4d-bffaa05c95ce","Type":"ContainerStarted","Data":"d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381"} Feb 02 17:45:58 crc kubenswrapper[4835]: I0202 17:45:58.502935 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w27pk" podStartSLOduration=1.818506403 podStartE2EDuration="4.502919368s" podCreationTimestamp="2026-02-02 17:45:54 +0000 UTC" firstStartedPulling="2026-02-02 17:45:55.449096264 +0000 UTC m=+3347.070700344" lastFinishedPulling="2026-02-02 17:45:58.133509229 +0000 UTC m=+3349.755113309" observedRunningTime="2026-02-02 17:45:58.49768851 +0000 UTC m=+3350.119292600" watchObservedRunningTime="2026-02-02 17:45:58.502919368 +0000 UTC 
m=+3350.124523448" Feb 02 17:46:04 crc kubenswrapper[4835]: I0202 17:46:04.543545 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:46:04 crc kubenswrapper[4835]: I0202 17:46:04.544026 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:46:05 crc kubenswrapper[4835]: I0202 17:46:05.595411 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-w27pk" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="registry-server" probeResult="failure" output=< Feb 02 17:46:05 crc kubenswrapper[4835]: timeout: failed to connect service ":50051" within 1s Feb 02 17:46:05 crc kubenswrapper[4835]: > Feb 02 17:46:06 crc kubenswrapper[4835]: I0202 17:46:06.189416 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:46:06 crc kubenswrapper[4835]: E0202 17:46:06.190152 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:46:14 crc kubenswrapper[4835]: I0202 17:46:14.595080 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:46:14 crc kubenswrapper[4835]: I0202 17:46:14.666430 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:46:14 crc kubenswrapper[4835]: I0202 17:46:14.835262 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w27pk"] Feb 02 17:46:15 crc kubenswrapper[4835]: I0202 17:46:15.626802 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w27pk" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="registry-server" containerID="cri-o://d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381" gracePeriod=2 Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.473574 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.582249 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-utilities\") pod \"010af145-f70a-49a8-9e4d-bffaa05c95ce\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.582398 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-catalog-content\") pod \"010af145-f70a-49a8-9e4d-bffaa05c95ce\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.582571 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qqbp\" (UniqueName: \"kubernetes.io/projected/010af145-f70a-49a8-9e4d-bffaa05c95ce-kube-api-access-9qqbp\") pod \"010af145-f70a-49a8-9e4d-bffaa05c95ce\" (UID: \"010af145-f70a-49a8-9e4d-bffaa05c95ce\") " Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.583143 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-utilities" (OuterVolumeSpecName: "utilities") pod "010af145-f70a-49a8-9e4d-bffaa05c95ce" (UID: "010af145-f70a-49a8-9e4d-bffaa05c95ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.588754 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/010af145-f70a-49a8-9e4d-bffaa05c95ce-kube-api-access-9qqbp" (OuterVolumeSpecName: "kube-api-access-9qqbp") pod "010af145-f70a-49a8-9e4d-bffaa05c95ce" (UID: "010af145-f70a-49a8-9e4d-bffaa05c95ce"). InnerVolumeSpecName "kube-api-access-9qqbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.614787 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "010af145-f70a-49a8-9e4d-bffaa05c95ce" (UID: "010af145-f70a-49a8-9e4d-bffaa05c95ce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.640856 4835 generic.go:334] "Generic (PLEG): container finished" podID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerID="d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381" exitCode=0 Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.640899 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w27pk" event={"ID":"010af145-f70a-49a8-9e4d-bffaa05c95ce","Type":"ContainerDied","Data":"d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381"} Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.640924 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w27pk" event={"ID":"010af145-f70a-49a8-9e4d-bffaa05c95ce","Type":"ContainerDied","Data":"a53997a54029b95ec173cbda25cc44fc913777c112e12d61f962ec22d4807159"} Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.640941 4835 scope.go:117] "RemoveContainer" containerID="d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.641067 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w27pk" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.675558 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w27pk"] Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.677046 4835 scope.go:117] "RemoveContainer" containerID="9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.683024 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w27pk"] Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.684582 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.684611 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010af145-f70a-49a8-9e4d-bffaa05c95ce-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.684624 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qqbp\" (UniqueName: \"kubernetes.io/projected/010af145-f70a-49a8-9e4d-bffaa05c95ce-kube-api-access-9qqbp\") on node \"crc\" DevicePath \"\"" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.703119 4835 scope.go:117] "RemoveContainer" containerID="10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.751443 4835 scope.go:117] "RemoveContainer" containerID="d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381" Feb 02 17:46:16 crc kubenswrapper[4835]: E0202 17:46:16.753121 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381\": container with ID starting with d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381 not found: ID does not exist" containerID="d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.753198 4835 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381"} err="failed to get container status \"d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381\": rpc error: code = NotFound desc = could not find container \"d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381\": container with ID starting with d982ccc470e3d68f5b11950329c0dd5526073900a039d4dcf50d99e8d4e89381 not found: ID does not exist" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.753245 4835 scope.go:117] "RemoveContainer" containerID="9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d" Feb 02 17:46:16 crc kubenswrapper[4835]: E0202 17:46:16.753709 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d\": container with ID starting with 9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d not found: ID does not exist" containerID="9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.753742 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d"} err="failed to get container status \"9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d\": rpc error: code = NotFound desc = could not find container \"9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d\": container with ID starting with 9a57c9ea27411346eb0bf7ff84f9732536fabeaba4d9c665fad209bf9225219d not found: ID does not exist" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.753766 4835 scope.go:117] "RemoveContainer" containerID="10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5" Feb 02 17:46:16 crc kubenswrapper[4835]: E0202 17:46:16.754029 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5\": container with ID starting with 10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5 not found: ID does not exist" containerID="10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5" Feb 02 17:46:16 crc kubenswrapper[4835]: I0202 17:46:16.754059 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5"} err="failed to get container status \"10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5\": rpc error: code = NotFound desc = could not find container \"10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5\": container with ID starting with 10d4dcae67a11a6fd0a9e7bff5932f1193d535aebe83d344575df6489fc437f5 not found: ID does not exist" Feb 02 17:46:17 crc kubenswrapper[4835]: I0202 17:46:17.210111 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" path="/var/lib/kubelet/pods/010af145-f70a-49a8-9e4d-bffaa05c95ce/volumes" Feb 02 17:46:18 crc kubenswrapper[4835]: I0202 17:46:18.188732 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:46:18 crc kubenswrapper[4835]: E0202 17:46:18.189207 4835 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:46:29 crc kubenswrapper[4835]: I0202 17:46:29.195315 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:46:29 crc kubenswrapper[4835]: E0202 17:46:29.196136 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:46:40 crc kubenswrapper[4835]: I0202 17:46:40.188484 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:46:40 crc kubenswrapper[4835]: E0202 17:46:40.190207 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:46:53 crc kubenswrapper[4835]: I0202 17:46:53.188736 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:46:53 crc kubenswrapper[4835]: E0202 17:46:53.190537 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:47:08 crc kubenswrapper[4835]: I0202 17:47:08.189526 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:47:08 crc kubenswrapper[4835]: E0202 17:47:08.190311 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:47:20 crc kubenswrapper[4835]: I0202 17:47:20.189830 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:47:21 crc kubenswrapper[4835]: I0202 17:47:21.234445 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" 
event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"199ea57f0a16746aa51bf916aa81f5d4b4260799c6f9055a352a05496c21c831"} Feb 02 17:47:43 crc kubenswrapper[4835]: I0202 17:47:43.041918 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-1a71-account-create-update-8g9t9"] Feb 02 17:47:43 crc kubenswrapper[4835]: I0202 17:47:43.053799 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-1a71-account-create-update-8g9t9"] Feb 02 17:47:43 crc kubenswrapper[4835]: I0202 17:47:43.199244 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6cf38f7-c12c-455a-a5f8-7f1d797f9a60" path="/var/lib/kubelet/pods/e6cf38f7-c12c-455a-a5f8-7f1d797f9a60/volumes" Feb 02 17:47:44 crc kubenswrapper[4835]: I0202 17:47:44.041392 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-f6xwq"] Feb 02 17:47:44 crc kubenswrapper[4835]: I0202 17:47:44.052993 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-f6xwq"] Feb 02 17:47:45 crc kubenswrapper[4835]: I0202 17:47:45.200142 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5884d13a-333b-48c2-9e73-0f0e3369a932" path="/var/lib/kubelet/pods/5884d13a-333b-48c2-9e73-0f0e3369a932/volumes" Feb 02 17:48:14 crc kubenswrapper[4835]: I0202 17:48:14.060316 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-xtzts"] Feb 02 17:48:14 crc kubenswrapper[4835]: I0202 17:48:14.075557 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-xtzts"] Feb 02 17:48:14 crc kubenswrapper[4835]: I0202 17:48:14.748303 4835 scope.go:117] "RemoveContainer" containerID="490f3467aa3c78ccfd60523db4df8f89ce9d16bb445fe8ef18eba8ae3b554802" Feb 02 17:48:14 crc kubenswrapper[4835]: I0202 17:48:14.776331 4835 scope.go:117] "RemoveContainer" containerID="c5230a66094f067556c747acab5b969e6dd2e93843de54cadd0468bfb996aa39" Feb 02 17:48:15 crc kubenswrapper[4835]: I0202 17:48:15.202735 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1664af25-6941-454e-9a16-2f27b62d4433" path="/var/lib/kubelet/pods/1664af25-6941-454e-9a16-2f27b62d4433/volumes" Feb 02 17:49:14 crc kubenswrapper[4835]: I0202 17:49:14.864043 4835 scope.go:117] "RemoveContainer" containerID="6267e780bd0e439f5147ae3b2e856c30f257d95165f74803197f87a621cfa863" Feb 02 17:49:44 crc kubenswrapper[4835]: I0202 17:49:44.870792 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:49:44 crc kubenswrapper[4835]: I0202 17:49:44.871400 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:50:14 crc kubenswrapper[4835]: I0202 17:50:14.869976 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:50:14 crc 
kubenswrapper[4835]: I0202 17:50:14.870517 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:50:44 crc kubenswrapper[4835]: I0202 17:50:44.869749 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:50:44 crc kubenswrapper[4835]: I0202 17:50:44.870291 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:50:44 crc kubenswrapper[4835]: I0202 17:50:44.870350 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:50:44 crc kubenswrapper[4835]: I0202 17:50:44.871357 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"199ea57f0a16746aa51bf916aa81f5d4b4260799c6f9055a352a05496c21c831"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:50:44 crc kubenswrapper[4835]: I0202 17:50:44.871409 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://199ea57f0a16746aa51bf916aa81f5d4b4260799c6f9055a352a05496c21c831" gracePeriod=600 Feb 02 17:50:45 crc kubenswrapper[4835]: I0202 17:50:45.989861 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="199ea57f0a16746aa51bf916aa81f5d4b4260799c6f9055a352a05496c21c831" exitCode=0 Feb 02 17:50:45 crc kubenswrapper[4835]: I0202 17:50:45.989946 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"199ea57f0a16746aa51bf916aa81f5d4b4260799c6f9055a352a05496c21c831"} Feb 02 17:50:45 crc kubenswrapper[4835]: I0202 17:50:45.990464 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e"} Feb 02 17:50:45 crc kubenswrapper[4835]: I0202 17:50:45.990483 4835 scope.go:117] "RemoveContainer" containerID="49f5d12829ee82a6c5cab1302db0e2065cbad1386149b441f2974b9ad4d3bf1b" Feb 02 17:53:14 crc kubenswrapper[4835]: I0202 17:53:14.869692 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:53:14 crc kubenswrapper[4835]: I0202 17:53:14.870246 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:53:44 crc kubenswrapper[4835]: I0202 17:53:44.870475 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:53:44 crc kubenswrapper[4835]: I0202 17:53:44.871051 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.297212 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tv426"] Feb 02 17:54:12 crc kubenswrapper[4835]: E0202 17:54:12.298306 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="extract-utilities" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.298325 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="extract-utilities" Feb 02 17:54:12 crc kubenswrapper[4835]: E0202 17:54:12.298341 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="extract-content" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.298350 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="extract-content" Feb 02 17:54:12 crc kubenswrapper[4835]: E0202 17:54:12.298375 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="registry-server" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.298385 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="registry-server" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.298624 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="010af145-f70a-49a8-9e4d-bffaa05c95ce" containerName="registry-server" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.300410 4835 util.go:30] "No sandbox for pod can be found. 
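Note: the repeated machine-config-daemon liveness failures above are plain HTTP GETs against the daemon's health endpoint being refused while the container is down. A hypothetical stdlib Go sketch of such a check (URL and failure text taken from the log; the 1s client timeout and the 2xx/3xx success rule are assumptions):

// liveness_check.go -- illustrative only: HTTP GET health check in the style of the probe above.
package main

import (
	"fmt"
	"net/http"
	"os"
	"time"
)

func main() {
	client := &http.Client{Timeout: 1 * time.Second} // timeout value is an assumption
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		// e.g. "dial tcp 127.0.0.1:8798: connect: connection refused" while the daemon is down
		fmt.Println("liveness probe failed:", err)
		os.Exit(1)
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		fmt.Println("liveness probe succeeded:", resp.Status)
		return
	}
	fmt.Println("liveness probe failed with status:", resp.Status)
	os.Exit(1)
}
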
Need to start a new one" pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.320419 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tv426"] Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.453831 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-catalog-content\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.454057 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvpxk\" (UniqueName: \"kubernetes.io/projected/1372e58d-6da2-47ac-b3c7-679ac1b5b500-kube-api-access-bvpxk\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.454111 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-utilities\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.555521 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-catalog-content\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.555687 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvpxk\" (UniqueName: \"kubernetes.io/projected/1372e58d-6da2-47ac-b3c7-679ac1b5b500-kube-api-access-bvpxk\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.555721 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-utilities\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.556116 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-catalog-content\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.556177 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-utilities\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.589209 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bvpxk\" (UniqueName: \"kubernetes.io/projected/1372e58d-6da2-47ac-b3c7-679ac1b5b500-kube-api-access-bvpxk\") pod \"redhat-operators-tv426\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:12 crc kubenswrapper[4835]: I0202 17:54:12.640541 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:13 crc kubenswrapper[4835]: I0202 17:54:13.715849 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tv426"] Feb 02 17:54:13 crc kubenswrapper[4835]: I0202 17:54:13.905397 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tv426" event={"ID":"1372e58d-6da2-47ac-b3c7-679ac1b5b500","Type":"ContainerStarted","Data":"6017e364044bc335d87fb54884c44d48b8ca58194108ca9ce06c390bf4707f07"} Feb 02 17:54:14 crc kubenswrapper[4835]: I0202 17:54:14.870668 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 17:54:14 crc kubenswrapper[4835]: I0202 17:54:14.871357 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 17:54:14 crc kubenswrapper[4835]: I0202 17:54:14.871424 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 17:54:14 crc kubenswrapper[4835]: I0202 17:54:14.872485 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 17:54:14 crc kubenswrapper[4835]: I0202 17:54:14.872589 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" gracePeriod=600 Feb 02 17:54:14 crc kubenswrapper[4835]: I0202 17:54:14.918791 4835 generic.go:334] "Generic (PLEG): container finished" podID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerID="062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3" exitCode=0 Feb 02 17:54:14 crc kubenswrapper[4835]: I0202 17:54:14.918839 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tv426" event={"ID":"1372e58d-6da2-47ac-b3c7-679ac1b5b500","Type":"ContainerDied","Data":"062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3"} Feb 02 17:54:14 crc kubenswrapper[4835]: I0202 17:54:14.921092 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 17:54:14 crc kubenswrapper[4835]: E0202 17:54:14.997346 4835 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:54:15 crc kubenswrapper[4835]: I0202 17:54:15.931346 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" exitCode=0 Feb 02 17:54:15 crc kubenswrapper[4835]: I0202 17:54:15.931387 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e"} Feb 02 17:54:15 crc kubenswrapper[4835]: I0202 17:54:15.931735 4835 scope.go:117] "RemoveContainer" containerID="199ea57f0a16746aa51bf916aa81f5d4b4260799c6f9055a352a05496c21c831" Feb 02 17:54:15 crc kubenswrapper[4835]: I0202 17:54:15.932866 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:54:15 crc kubenswrapper[4835]: E0202 17:54:15.933267 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:54:16 crc kubenswrapper[4835]: I0202 17:54:16.943665 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tv426" event={"ID":"1372e58d-6da2-47ac-b3c7-679ac1b5b500","Type":"ContainerStarted","Data":"db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac"} Feb 02 17:54:17 crc kubenswrapper[4835]: I0202 17:54:17.954331 4835 generic.go:334] "Generic (PLEG): container finished" podID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerID="db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac" exitCode=0 Feb 02 17:54:17 crc kubenswrapper[4835]: I0202 17:54:17.954407 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tv426" event={"ID":"1372e58d-6da2-47ac-b3c7-679ac1b5b500","Type":"ContainerDied","Data":"db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac"} Feb 02 17:54:18 crc kubenswrapper[4835]: I0202 17:54:18.964682 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tv426" event={"ID":"1372e58d-6da2-47ac-b3c7-679ac1b5b500","Type":"ContainerStarted","Data":"2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab"} Feb 02 17:54:18 crc kubenswrapper[4835]: I0202 17:54:18.987375 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tv426" podStartSLOduration=3.527346347 podStartE2EDuration="6.987357609s" podCreationTimestamp="2026-02-02 17:54:12 +0000 UTC" firstStartedPulling="2026-02-02 17:54:14.920629119 +0000 UTC m=+3846.542233219" lastFinishedPulling="2026-02-02 17:54:18.380640401 +0000 UTC m=+3850.002244481" observedRunningTime="2026-02-02 
17:54:18.981713469 +0000 UTC m=+3850.603317559" watchObservedRunningTime="2026-02-02 17:54:18.987357609 +0000 UTC m=+3850.608961689" Feb 02 17:54:22 crc kubenswrapper[4835]: I0202 17:54:22.642207 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:22 crc kubenswrapper[4835]: I0202 17:54:22.642733 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:23 crc kubenswrapper[4835]: I0202 17:54:23.688764 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tv426" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="registry-server" probeResult="failure" output=< Feb 02 17:54:23 crc kubenswrapper[4835]: timeout: failed to connect service ":50051" within 1s Feb 02 17:54:23 crc kubenswrapper[4835]: > Feb 02 17:54:30 crc kubenswrapper[4835]: I0202 17:54:30.189042 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:54:30 crc kubenswrapper[4835]: E0202 17:54:30.189911 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:54:32 crc kubenswrapper[4835]: I0202 17:54:32.701263 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:32 crc kubenswrapper[4835]: I0202 17:54:32.760854 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:32 crc kubenswrapper[4835]: I0202 17:54:32.936130 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tv426"] Feb 02 17:54:34 crc kubenswrapper[4835]: I0202 17:54:34.132796 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tv426" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="registry-server" containerID="cri-o://2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab" gracePeriod=2 Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.060871 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.144921 4835 generic.go:334] "Generic (PLEG): container finished" podID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerID="2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab" exitCode=0 Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.144975 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tv426" event={"ID":"1372e58d-6da2-47ac-b3c7-679ac1b5b500","Type":"ContainerDied","Data":"2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab"} Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.145006 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tv426" event={"ID":"1372e58d-6da2-47ac-b3c7-679ac1b5b500","Type":"ContainerDied","Data":"6017e364044bc335d87fb54884c44d48b8ca58194108ca9ce06c390bf4707f07"} Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.145025 4835 scope.go:117] "RemoveContainer" containerID="2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.145174 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tv426" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.169198 4835 scope.go:117] "RemoveContainer" containerID="db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.187235 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvpxk\" (UniqueName: \"kubernetes.io/projected/1372e58d-6da2-47ac-b3c7-679ac1b5b500-kube-api-access-bvpxk\") pod \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.187475 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-utilities\") pod \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.187568 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-catalog-content\") pod \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\" (UID: \"1372e58d-6da2-47ac-b3c7-679ac1b5b500\") " Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.192166 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-utilities" (OuterVolumeSpecName: "utilities") pod "1372e58d-6da2-47ac-b3c7-679ac1b5b500" (UID: "1372e58d-6da2-47ac-b3c7-679ac1b5b500"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.204462 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1372e58d-6da2-47ac-b3c7-679ac1b5b500-kube-api-access-bvpxk" (OuterVolumeSpecName: "kube-api-access-bvpxk") pod "1372e58d-6da2-47ac-b3c7-679ac1b5b500" (UID: "1372e58d-6da2-47ac-b3c7-679ac1b5b500"). InnerVolumeSpecName "kube-api-access-bvpxk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.217020 4835 scope.go:117] "RemoveContainer" containerID="062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.289864 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.290100 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvpxk\" (UniqueName: \"kubernetes.io/projected/1372e58d-6da2-47ac-b3c7-679ac1b5b500-kube-api-access-bvpxk\") on node \"crc\" DevicePath \"\"" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.307373 4835 scope.go:117] "RemoveContainer" containerID="2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab" Feb 02 17:54:35 crc kubenswrapper[4835]: E0202 17:54:35.307858 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab\": container with ID starting with 2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab not found: ID does not exist" containerID="2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.307914 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab"} err="failed to get container status \"2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab\": rpc error: code = NotFound desc = could not find container \"2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab\": container with ID starting with 2ca6b56654a09ba3447008bd8083eb0074895a4857844c766e9e1b4297c95bab not found: ID does not exist" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.307948 4835 scope.go:117] "RemoveContainer" containerID="db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac" Feb 02 17:54:35 crc kubenswrapper[4835]: E0202 17:54:35.308365 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac\": container with ID starting with db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac not found: ID does not exist" containerID="db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.308392 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac"} err="failed to get container status \"db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac\": rpc error: code = NotFound desc = could not find container \"db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac\": container with ID starting with db5f117d63ef6b584590b660846c4fe6674f70af017de0bc5dbd26347ca9c8ac not found: ID does not exist" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.308410 4835 scope.go:117] "RemoveContainer" containerID="062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3" Feb 02 17:54:35 crc kubenswrapper[4835]: E0202 17:54:35.308724 4835 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3\": container with ID starting with 062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3 not found: ID does not exist" containerID="062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.308746 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3"} err="failed to get container status \"062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3\": rpc error: code = NotFound desc = could not find container \"062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3\": container with ID starting with 062f78dcd870a908657b5ae6d4fbd8230f5deb2496edc70f2e1ce1eed8d827b3 not found: ID does not exist" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.316228 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1372e58d-6da2-47ac-b3c7-679ac1b5b500" (UID: "1372e58d-6da2-47ac-b3c7-679ac1b5b500"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.391366 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1372e58d-6da2-47ac-b3c7-679ac1b5b500-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.474497 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tv426"] Feb 02 17:54:35 crc kubenswrapper[4835]: I0202 17:54:35.486501 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tv426"] Feb 02 17:54:37 crc kubenswrapper[4835]: I0202 17:54:37.199300 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" path="/var/lib/kubelet/pods/1372e58d-6da2-47ac-b3c7-679ac1b5b500/volumes" Feb 02 17:54:44 crc kubenswrapper[4835]: I0202 17:54:44.188800 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:54:44 crc kubenswrapper[4835]: E0202 17:54:44.201633 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:54:59 crc kubenswrapper[4835]: I0202 17:54:59.197065 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:54:59 crc kubenswrapper[4835]: E0202 17:54:59.197849 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" 
podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:55:12 crc kubenswrapper[4835]: I0202 17:55:12.189895 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:55:12 crc kubenswrapper[4835]: E0202 17:55:12.190592 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:55:26 crc kubenswrapper[4835]: I0202 17:55:26.192313 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:55:26 crc kubenswrapper[4835]: E0202 17:55:26.193726 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:55:26 crc kubenswrapper[4835]: I0202 17:55:26.888944 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t2c8c"] Feb 02 17:55:26 crc kubenswrapper[4835]: E0202 17:55:26.889397 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="registry-server" Feb 02 17:55:26 crc kubenswrapper[4835]: I0202 17:55:26.889419 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="registry-server" Feb 02 17:55:26 crc kubenswrapper[4835]: E0202 17:55:26.889431 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="extract-content" Feb 02 17:55:26 crc kubenswrapper[4835]: I0202 17:55:26.889438 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="extract-content" Feb 02 17:55:26 crc kubenswrapper[4835]: E0202 17:55:26.889468 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="extract-utilities" Feb 02 17:55:26 crc kubenswrapper[4835]: I0202 17:55:26.889476 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="extract-utilities" Feb 02 17:55:26 crc kubenswrapper[4835]: I0202 17:55:26.889659 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="1372e58d-6da2-47ac-b3c7-679ac1b5b500" containerName="registry-server" Feb 02 17:55:26 crc kubenswrapper[4835]: I0202 17:55:26.891014 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:26 crc kubenswrapper[4835]: I0202 17:55:26.905184 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t2c8c"] Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.060172 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-catalog-content\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.060477 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-utilities\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.060611 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9njxb\" (UniqueName: \"kubernetes.io/projected/5bf54b79-2207-46cc-9908-86dc787114e6-kube-api-access-9njxb\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.162266 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-utilities\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.162380 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9njxb\" (UniqueName: \"kubernetes.io/projected/5bf54b79-2207-46cc-9908-86dc787114e6-kube-api-access-9njxb\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.162569 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-catalog-content\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.162801 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-utilities\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.163008 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-catalog-content\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.194155 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9njxb\" (UniqueName: \"kubernetes.io/projected/5bf54b79-2207-46cc-9908-86dc787114e6-kube-api-access-9njxb\") pod \"certified-operators-t2c8c\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.212767 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:27 crc kubenswrapper[4835]: I0202 17:55:27.879114 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t2c8c"] Feb 02 17:55:28 crc kubenswrapper[4835]: I0202 17:55:28.597484 4835 generic.go:334] "Generic (PLEG): container finished" podID="5bf54b79-2207-46cc-9908-86dc787114e6" containerID="020626e47160c3923c64ff79ea207e626cd623bba5817973509bbb57e1b5d882" exitCode=0 Feb 02 17:55:28 crc kubenswrapper[4835]: I0202 17:55:28.597540 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2c8c" event={"ID":"5bf54b79-2207-46cc-9908-86dc787114e6","Type":"ContainerDied","Data":"020626e47160c3923c64ff79ea207e626cd623bba5817973509bbb57e1b5d882"} Feb 02 17:55:28 crc kubenswrapper[4835]: I0202 17:55:28.598758 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2c8c" event={"ID":"5bf54b79-2207-46cc-9908-86dc787114e6","Type":"ContainerStarted","Data":"cbeaa49d0f973fdd51c0e2894fd4b00deb3f8b8377a844a3f65f37bfb10bc050"} Feb 02 17:55:30 crc kubenswrapper[4835]: I0202 17:55:30.613500 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2c8c" event={"ID":"5bf54b79-2207-46cc-9908-86dc787114e6","Type":"ContainerStarted","Data":"2701169431d226b09351ff13eca97095af8c7977dceaf5baf882030b9e98dda5"} Feb 02 17:55:31 crc kubenswrapper[4835]: I0202 17:55:31.622626 4835 generic.go:334] "Generic (PLEG): container finished" podID="5bf54b79-2207-46cc-9908-86dc787114e6" containerID="2701169431d226b09351ff13eca97095af8c7977dceaf5baf882030b9e98dda5" exitCode=0 Feb 02 17:55:31 crc kubenswrapper[4835]: I0202 17:55:31.622716 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2c8c" event={"ID":"5bf54b79-2207-46cc-9908-86dc787114e6","Type":"ContainerDied","Data":"2701169431d226b09351ff13eca97095af8c7977dceaf5baf882030b9e98dda5"} Feb 02 17:55:32 crc kubenswrapper[4835]: I0202 17:55:32.634616 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2c8c" event={"ID":"5bf54b79-2207-46cc-9908-86dc787114e6","Type":"ContainerStarted","Data":"fc1bdb805e5cb5ab08ca129ff5d4ada46331a98ce62de28bb43467d7cd87f6c5"} Feb 02 17:55:32 crc kubenswrapper[4835]: I0202 17:55:32.662185 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t2c8c" podStartSLOduration=3.013383532 podStartE2EDuration="6.662165666s" podCreationTimestamp="2026-02-02 17:55:26 +0000 UTC" firstStartedPulling="2026-02-02 17:55:28.599485671 +0000 UTC m=+3920.221089751" lastFinishedPulling="2026-02-02 17:55:32.248267805 +0000 UTC m=+3923.869871885" observedRunningTime="2026-02-02 17:55:32.660141319 +0000 UTC m=+3924.281745389" watchObservedRunningTime="2026-02-02 17:55:32.662165666 +0000 UTC m=+3924.283769746" Feb 02 17:55:37 crc kubenswrapper[4835]: I0202 17:55:37.190131 4835 scope.go:117] "RemoveContainer" 
containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:55:37 crc kubenswrapper[4835]: E0202 17:55:37.191588 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:55:37 crc kubenswrapper[4835]: I0202 17:55:37.212853 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:37 crc kubenswrapper[4835]: I0202 17:55:37.212914 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:37 crc kubenswrapper[4835]: I0202 17:55:37.265262 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:37 crc kubenswrapper[4835]: I0202 17:55:37.732359 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:37 crc kubenswrapper[4835]: I0202 17:55:37.809906 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t2c8c"] Feb 02 17:55:39 crc kubenswrapper[4835]: I0202 17:55:39.699134 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t2c8c" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" containerName="registry-server" containerID="cri-o://fc1bdb805e5cb5ab08ca129ff5d4ada46331a98ce62de28bb43467d7cd87f6c5" gracePeriod=2 Feb 02 17:55:39 crc kubenswrapper[4835]: E0202 17:55:39.831471 4835 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5bf54b79_2207_46cc_9908_86dc787114e6.slice/crio-fc1bdb805e5cb5ab08ca129ff5d4ada46331a98ce62de28bb43467d7cd87f6c5.scope\": RecentStats: unable to find data in memory cache]" Feb 02 17:55:40 crc kubenswrapper[4835]: I0202 17:55:40.708310 4835 generic.go:334] "Generic (PLEG): container finished" podID="5bf54b79-2207-46cc-9908-86dc787114e6" containerID="fc1bdb805e5cb5ab08ca129ff5d4ada46331a98ce62de28bb43467d7cd87f6c5" exitCode=0 Feb 02 17:55:40 crc kubenswrapper[4835]: I0202 17:55:40.708652 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2c8c" event={"ID":"5bf54b79-2207-46cc-9908-86dc787114e6","Type":"ContainerDied","Data":"fc1bdb805e5cb5ab08ca129ff5d4ada46331a98ce62de28bb43467d7cd87f6c5"} Feb 02 17:55:40 crc kubenswrapper[4835]: I0202 17:55:40.931418 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:40 crc kubenswrapper[4835]: I0202 17:55:40.955360 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-utilities\") pod \"5bf54b79-2207-46cc-9908-86dc787114e6\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " Feb 02 17:55:40 crc kubenswrapper[4835]: I0202 17:55:40.955490 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-catalog-content\") pod \"5bf54b79-2207-46cc-9908-86dc787114e6\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " Feb 02 17:55:40 crc kubenswrapper[4835]: I0202 17:55:40.955543 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9njxb\" (UniqueName: \"kubernetes.io/projected/5bf54b79-2207-46cc-9908-86dc787114e6-kube-api-access-9njxb\") pod \"5bf54b79-2207-46cc-9908-86dc787114e6\" (UID: \"5bf54b79-2207-46cc-9908-86dc787114e6\") " Feb 02 17:55:40 crc kubenswrapper[4835]: I0202 17:55:40.956704 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-utilities" (OuterVolumeSpecName: "utilities") pod "5bf54b79-2207-46cc-9908-86dc787114e6" (UID: "5bf54b79-2207-46cc-9908-86dc787114e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:55:40 crc kubenswrapper[4835]: I0202 17:55:40.978533 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bf54b79-2207-46cc-9908-86dc787114e6-kube-api-access-9njxb" (OuterVolumeSpecName: "kube-api-access-9njxb") pod "5bf54b79-2207-46cc-9908-86dc787114e6" (UID: "5bf54b79-2207-46cc-9908-86dc787114e6"). InnerVolumeSpecName "kube-api-access-9njxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.013526 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5bf54b79-2207-46cc-9908-86dc787114e6" (UID: "5bf54b79-2207-46cc-9908-86dc787114e6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.057748 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.057791 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bf54b79-2207-46cc-9908-86dc787114e6-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.057806 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9njxb\" (UniqueName: \"kubernetes.io/projected/5bf54b79-2207-46cc-9908-86dc787114e6-kube-api-access-9njxb\") on node \"crc\" DevicePath \"\"" Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.722132 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2c8c" event={"ID":"5bf54b79-2207-46cc-9908-86dc787114e6","Type":"ContainerDied","Data":"cbeaa49d0f973fdd51c0e2894fd4b00deb3f8b8377a844a3f65f37bfb10bc050"} Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.722462 4835 scope.go:117] "RemoveContainer" containerID="fc1bdb805e5cb5ab08ca129ff5d4ada46331a98ce62de28bb43467d7cd87f6c5" Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.722239 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2c8c" Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.753024 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t2c8c"] Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.755234 4835 scope.go:117] "RemoveContainer" containerID="2701169431d226b09351ff13eca97095af8c7977dceaf5baf882030b9e98dda5" Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.764311 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t2c8c"] Feb 02 17:55:41 crc kubenswrapper[4835]: I0202 17:55:41.779239 4835 scope.go:117] "RemoveContainer" containerID="020626e47160c3923c64ff79ea207e626cd623bba5817973509bbb57e1b5d882" Feb 02 17:55:43 crc kubenswrapper[4835]: I0202 17:55:43.199788 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" path="/var/lib/kubelet/pods/5bf54b79-2207-46cc-9908-86dc787114e6/volumes" Feb 02 17:55:48 crc kubenswrapper[4835]: I0202 17:55:48.188209 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:55:48 crc kubenswrapper[4835]: E0202 17:55:48.189038 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.826820 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wgxgt"] Feb 02 17:55:49 crc kubenswrapper[4835]: E0202 17:55:49.827514 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" 
containerName="extract-utilities" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.827530 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" containerName="extract-utilities" Feb 02 17:55:49 crc kubenswrapper[4835]: E0202 17:55:49.827542 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" containerName="registry-server" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.827549 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" containerName="registry-server" Feb 02 17:55:49 crc kubenswrapper[4835]: E0202 17:55:49.827582 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" containerName="extract-content" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.827589 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" containerName="extract-content" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.827799 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bf54b79-2207-46cc-9908-86dc787114e6" containerName="registry-server" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.829168 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.849754 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wgxgt"] Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.920435 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-utilities\") pod \"community-operators-wgxgt\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.920553 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s699s\" (UniqueName: \"kubernetes.io/projected/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-kube-api-access-s699s\") pod \"community-operators-wgxgt\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:49 crc kubenswrapper[4835]: I0202 17:55:49.920590 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-catalog-content\") pod \"community-operators-wgxgt\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.022704 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s699s\" (UniqueName: \"kubernetes.io/projected/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-kube-api-access-s699s\") pod \"community-operators-wgxgt\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.022785 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-catalog-content\") pod \"community-operators-wgxgt\" (UID: 
\"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.023724 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-catalog-content\") pod \"community-operators-wgxgt\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.024359 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-utilities\") pod \"community-operators-wgxgt\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.024695 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-utilities\") pod \"community-operators-wgxgt\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.045296 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s699s\" (UniqueName: \"kubernetes.io/projected/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-kube-api-access-s699s\") pod \"community-operators-wgxgt\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.159019 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.765331 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wgxgt"] Feb 02 17:55:50 crc kubenswrapper[4835]: I0202 17:55:50.795148 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgxgt" event={"ID":"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d","Type":"ContainerStarted","Data":"03f2c83ca3f83971c7894d774f1a3a54e375dd6cb76393cddd7015b0bc968f37"} Feb 02 17:55:51 crc kubenswrapper[4835]: I0202 17:55:51.805235 4835 generic.go:334] "Generic (PLEG): container finished" podID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerID="c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4" exitCode=0 Feb 02 17:55:51 crc kubenswrapper[4835]: I0202 17:55:51.805347 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgxgt" event={"ID":"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d","Type":"ContainerDied","Data":"c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4"} Feb 02 17:55:53 crc kubenswrapper[4835]: I0202 17:55:53.822897 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgxgt" event={"ID":"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d","Type":"ContainerStarted","Data":"22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388"} Feb 02 17:55:55 crc kubenswrapper[4835]: I0202 17:55:55.852129 4835 generic.go:334] "Generic (PLEG): container finished" podID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerID="22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388" exitCode=0 Feb 02 17:55:55 crc kubenswrapper[4835]: I0202 
17:55:55.852475 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgxgt" event={"ID":"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d","Type":"ContainerDied","Data":"22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388"} Feb 02 17:55:56 crc kubenswrapper[4835]: I0202 17:55:56.863300 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgxgt" event={"ID":"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d","Type":"ContainerStarted","Data":"3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8"} Feb 02 17:55:56 crc kubenswrapper[4835]: I0202 17:55:56.887508 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wgxgt" podStartSLOduration=3.239229839 podStartE2EDuration="7.887479103s" podCreationTimestamp="2026-02-02 17:55:49 +0000 UTC" firstStartedPulling="2026-02-02 17:55:51.807354709 +0000 UTC m=+3943.428958789" lastFinishedPulling="2026-02-02 17:55:56.455603973 +0000 UTC m=+3948.077208053" observedRunningTime="2026-02-02 17:55:56.880926008 +0000 UTC m=+3948.502530088" watchObservedRunningTime="2026-02-02 17:55:56.887479103 +0000 UTC m=+3948.509083203" Feb 02 17:56:00 crc kubenswrapper[4835]: I0202 17:56:00.159425 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:56:00 crc kubenswrapper[4835]: I0202 17:56:00.159936 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:56:00 crc kubenswrapper[4835]: I0202 17:56:00.210226 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:56:03 crc kubenswrapper[4835]: I0202 17:56:03.188537 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:56:03 crc kubenswrapper[4835]: E0202 17:56:03.189460 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:56:10 crc kubenswrapper[4835]: I0202 17:56:10.216365 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:56:10 crc kubenswrapper[4835]: I0202 17:56:10.263736 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wgxgt"] Feb 02 17:56:10 crc kubenswrapper[4835]: I0202 17:56:10.978619 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wgxgt" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerName="registry-server" containerID="cri-o://3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8" gracePeriod=2 Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.562186 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.687527 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s699s\" (UniqueName: \"kubernetes.io/projected/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-kube-api-access-s699s\") pod \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.687698 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-catalog-content\") pod \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.687727 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-utilities\") pod \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\" (UID: \"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d\") " Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.689084 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-utilities" (OuterVolumeSpecName: "utilities") pod "b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" (UID: "b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.701620 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-kube-api-access-s699s" (OuterVolumeSpecName: "kube-api-access-s699s") pod "b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" (UID: "b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d"). InnerVolumeSpecName "kube-api-access-s699s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.738166 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" (UID: "b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.790567 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s699s\" (UniqueName: \"kubernetes.io/projected/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-kube-api-access-s699s\") on node \"crc\" DevicePath \"\"" Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.790607 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.790617 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.991900 4835 generic.go:334] "Generic (PLEG): container finished" podID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerID="3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8" exitCode=0 Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.991959 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgxgt" event={"ID":"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d","Type":"ContainerDied","Data":"3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8"} Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.991993 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wgxgt" Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.992000 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wgxgt" event={"ID":"b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d","Type":"ContainerDied","Data":"03f2c83ca3f83971c7894d774f1a3a54e375dd6cb76393cddd7015b0bc968f37"} Feb 02 17:56:11 crc kubenswrapper[4835]: I0202 17:56:11.992024 4835 scope.go:117] "RemoveContainer" containerID="3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8" Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.013735 4835 scope.go:117] "RemoveContainer" containerID="22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388" Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.036461 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wgxgt"] Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.046674 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wgxgt"] Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.053103 4835 scope.go:117] "RemoveContainer" containerID="c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4" Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.082826 4835 scope.go:117] "RemoveContainer" containerID="3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8" Feb 02 17:56:12 crc kubenswrapper[4835]: E0202 17:56:12.083500 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8\": container with ID starting with 3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8 not found: ID does not exist" containerID="3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8" Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.083541 
4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8"} err="failed to get container status \"3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8\": rpc error: code = NotFound desc = could not find container \"3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8\": container with ID starting with 3a6c7f8d81411e213fdcf911ed3411d32be9fa101a883ec2fb42fed2a3226de8 not found: ID does not exist" Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.083567 4835 scope.go:117] "RemoveContainer" containerID="22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388" Feb 02 17:56:12 crc kubenswrapper[4835]: E0202 17:56:12.083957 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388\": container with ID starting with 22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388 not found: ID does not exist" containerID="22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388" Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.084011 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388"} err="failed to get container status \"22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388\": rpc error: code = NotFound desc = could not find container \"22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388\": container with ID starting with 22d25ba2c2a89edfe516ccde1216aa7416c1687818ba3597107af313cd57b388 not found: ID does not exist" Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.084043 4835 scope.go:117] "RemoveContainer" containerID="c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4" Feb 02 17:56:12 crc kubenswrapper[4835]: E0202 17:56:12.084311 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4\": container with ID starting with c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4 not found: ID does not exist" containerID="c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4" Feb 02 17:56:12 crc kubenswrapper[4835]: I0202 17:56:12.084353 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4"} err="failed to get container status \"c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4\": rpc error: code = NotFound desc = could not find container \"c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4\": container with ID starting with c52d4b1cb813d5d63392904c7bb2de383adb3a291a3e8744d1f40aee6df45de4 not found: ID does not exist" Feb 02 17:56:13 crc kubenswrapper[4835]: I0202 17:56:13.199680 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" path="/var/lib/kubelet/pods/b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d/volumes" Feb 02 17:56:17 crc kubenswrapper[4835]: I0202 17:56:17.189118 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:56:17 crc kubenswrapper[4835]: E0202 17:56:17.190614 4835 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.819409 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ktgrs"] Feb 02 17:56:28 crc kubenswrapper[4835]: E0202 17:56:28.820391 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerName="extract-utilities" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.820409 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerName="extract-utilities" Feb 02 17:56:28 crc kubenswrapper[4835]: E0202 17:56:28.820431 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerName="registry-server" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.820441 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerName="registry-server" Feb 02 17:56:28 crc kubenswrapper[4835]: E0202 17:56:28.820474 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerName="extract-content" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.820481 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerName="extract-content" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.820738 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2f9975a-4dfc-4e72-8dcf-2d20ad9d8f8d" containerName="registry-server" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.822406 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.836712 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktgrs"] Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.941856 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jcm5\" (UniqueName: \"kubernetes.io/projected/cd847e5f-9278-4280-a37a-808c1b2619a4-kube-api-access-7jcm5\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.941917 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-utilities\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:28 crc kubenswrapper[4835]: I0202 17:56:28.942078 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-catalog-content\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.043497 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jcm5\" (UniqueName: \"kubernetes.io/projected/cd847e5f-9278-4280-a37a-808c1b2619a4-kube-api-access-7jcm5\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.043566 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-utilities\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.043704 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-catalog-content\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.044340 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-catalog-content\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.044338 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-utilities\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.074255 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7jcm5\" (UniqueName: \"kubernetes.io/projected/cd847e5f-9278-4280-a37a-808c1b2619a4-kube-api-access-7jcm5\") pod \"redhat-marketplace-ktgrs\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.144564 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.198832 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:56:29 crc kubenswrapper[4835]: E0202 17:56:29.199095 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:56:29 crc kubenswrapper[4835]: I0202 17:56:29.662495 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktgrs"] Feb 02 17:56:30 crc kubenswrapper[4835]: I0202 17:56:30.199487 4835 generic.go:334] "Generic (PLEG): container finished" podID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerID="38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e" exitCode=0 Feb 02 17:56:30 crc kubenswrapper[4835]: I0202 17:56:30.199591 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktgrs" event={"ID":"cd847e5f-9278-4280-a37a-808c1b2619a4","Type":"ContainerDied","Data":"38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e"} Feb 02 17:56:30 crc kubenswrapper[4835]: I0202 17:56:30.199830 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktgrs" event={"ID":"cd847e5f-9278-4280-a37a-808c1b2619a4","Type":"ContainerStarted","Data":"e4d30d7a226ac809abb4cd73bea7a568b9a56257a0770930f6a9db5e3ed0f312"} Feb 02 17:56:31 crc kubenswrapper[4835]: I0202 17:56:31.216313 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktgrs" event={"ID":"cd847e5f-9278-4280-a37a-808c1b2619a4","Type":"ContainerStarted","Data":"7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8"} Feb 02 17:56:32 crc kubenswrapper[4835]: I0202 17:56:32.225236 4835 generic.go:334] "Generic (PLEG): container finished" podID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerID="7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8" exitCode=0 Feb 02 17:56:32 crc kubenswrapper[4835]: I0202 17:56:32.226689 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktgrs" event={"ID":"cd847e5f-9278-4280-a37a-808c1b2619a4","Type":"ContainerDied","Data":"7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8"} Feb 02 17:56:33 crc kubenswrapper[4835]: I0202 17:56:33.243355 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktgrs" event={"ID":"cd847e5f-9278-4280-a37a-808c1b2619a4","Type":"ContainerStarted","Data":"58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c"} Feb 02 17:56:33 crc kubenswrapper[4835]: I0202 17:56:33.272230 4835 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-ktgrs" podStartSLOduration=2.756863857 podStartE2EDuration="5.27220739s" podCreationTimestamp="2026-02-02 17:56:28 +0000 UTC" firstStartedPulling="2026-02-02 17:56:30.201109692 +0000 UTC m=+3981.822713772" lastFinishedPulling="2026-02-02 17:56:32.716453225 +0000 UTC m=+3984.338057305" observedRunningTime="2026-02-02 17:56:33.263891095 +0000 UTC m=+3984.885495175" watchObservedRunningTime="2026-02-02 17:56:33.27220739 +0000 UTC m=+3984.893811470" Feb 02 17:56:39 crc kubenswrapper[4835]: I0202 17:56:39.145842 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:39 crc kubenswrapper[4835]: I0202 17:56:39.146466 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:39 crc kubenswrapper[4835]: I0202 17:56:39.203306 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:39 crc kubenswrapper[4835]: I0202 17:56:39.334375 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:39 crc kubenswrapper[4835]: I0202 17:56:39.441367 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktgrs"] Feb 02 17:56:41 crc kubenswrapper[4835]: I0202 17:56:41.302735 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ktgrs" podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerName="registry-server" containerID="cri-o://58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c" gracePeriod=2 Feb 02 17:56:41 crc kubenswrapper[4835]: W0202 17:56:41.345563 4835 helpers.go:245] readString: Failed to read "/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd847e5f_9278_4280_a37a_808c1b2619a4.slice/crio-e4d30d7a226ac809abb4cd73bea7a568b9a56257a0770930f6a9db5e3ed0f312/cpuset.cpus.effective": open /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd847e5f_9278_4280_a37a_808c1b2619a4.slice/crio-e4d30d7a226ac809abb4cd73bea7a568b9a56257a0770930f6a9db5e3ed0f312/cpuset.cpus.effective: no such device Feb 02 17:56:41 crc kubenswrapper[4835]: I0202 17:56:41.896095 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:41 crc kubenswrapper[4835]: I0202 17:56:41.991078 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-utilities\") pod \"cd847e5f-9278-4280-a37a-808c1b2619a4\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " Feb 02 17:56:41 crc kubenswrapper[4835]: I0202 17:56:41.991224 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jcm5\" (UniqueName: \"kubernetes.io/projected/cd847e5f-9278-4280-a37a-808c1b2619a4-kube-api-access-7jcm5\") pod \"cd847e5f-9278-4280-a37a-808c1b2619a4\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " Feb 02 17:56:41 crc kubenswrapper[4835]: I0202 17:56:41.991274 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-catalog-content\") pod \"cd847e5f-9278-4280-a37a-808c1b2619a4\" (UID: \"cd847e5f-9278-4280-a37a-808c1b2619a4\") " Feb 02 17:56:41 crc kubenswrapper[4835]: I0202 17:56:41.992101 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-utilities" (OuterVolumeSpecName: "utilities") pod "cd847e5f-9278-4280-a37a-808c1b2619a4" (UID: "cd847e5f-9278-4280-a37a-808c1b2619a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:56:41 crc kubenswrapper[4835]: I0202 17:56:41.996944 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd847e5f-9278-4280-a37a-808c1b2619a4-kube-api-access-7jcm5" (OuterVolumeSpecName: "kube-api-access-7jcm5") pod "cd847e5f-9278-4280-a37a-808c1b2619a4" (UID: "cd847e5f-9278-4280-a37a-808c1b2619a4"). InnerVolumeSpecName "kube-api-access-7jcm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.009828 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd847e5f-9278-4280-a37a-808c1b2619a4" (UID: "cd847e5f-9278-4280-a37a-808c1b2619a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.093622 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.093657 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd847e5f-9278-4280-a37a-808c1b2619a4-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.093666 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jcm5\" (UniqueName: \"kubernetes.io/projected/cd847e5f-9278-4280-a37a-808c1b2619a4-kube-api-access-7jcm5\") on node \"crc\" DevicePath \"\"" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.188794 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:56:42 crc kubenswrapper[4835]: E0202 17:56:42.189204 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.311835 4835 generic.go:334] "Generic (PLEG): container finished" podID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerID="58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c" exitCode=0 Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.311911 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktgrs" event={"ID":"cd847e5f-9278-4280-a37a-808c1b2619a4","Type":"ContainerDied","Data":"58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c"} Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.312153 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ktgrs" event={"ID":"cd847e5f-9278-4280-a37a-808c1b2619a4","Type":"ContainerDied","Data":"e4d30d7a226ac809abb4cd73bea7a568b9a56257a0770930f6a9db5e3ed0f312"} Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.311951 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ktgrs" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.312670 4835 scope.go:117] "RemoveContainer" containerID="58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.331603 4835 scope.go:117] "RemoveContainer" containerID="7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.362031 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktgrs"] Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.368659 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ktgrs"] Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.382977 4835 scope.go:117] "RemoveContainer" containerID="38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.445348 4835 scope.go:117] "RemoveContainer" containerID="58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c" Feb 02 17:56:42 crc kubenswrapper[4835]: E0202 17:56:42.445858 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c\": container with ID starting with 58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c not found: ID does not exist" containerID="58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.445904 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c"} err="failed to get container status \"58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c\": rpc error: code = NotFound desc = could not find container \"58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c\": container with ID starting with 58d47d80b797757a324f99659829bdd8daf504e77b07e4f07a550e8be13ddf6c not found: ID does not exist" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.445931 4835 scope.go:117] "RemoveContainer" containerID="7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8" Feb 02 17:56:42 crc kubenswrapper[4835]: E0202 17:56:42.446246 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8\": container with ID starting with 7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8 not found: ID does not exist" containerID="7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.446288 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8"} err="failed to get container status \"7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8\": rpc error: code = NotFound desc = could not find container \"7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8\": container with ID starting with 7344950a0e4716e004c94b422de741fed1e1e29ffa90ff9cbfc5e7037df724d8 not found: ID does not exist" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.446310 4835 scope.go:117] "RemoveContainer" 
containerID="38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e" Feb 02 17:56:42 crc kubenswrapper[4835]: E0202 17:56:42.446781 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e\": container with ID starting with 38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e not found: ID does not exist" containerID="38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e" Feb 02 17:56:42 crc kubenswrapper[4835]: I0202 17:56:42.446821 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e"} err="failed to get container status \"38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e\": rpc error: code = NotFound desc = could not find container \"38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e\": container with ID starting with 38c7b947ffea8b443ea5d7b64e03e4ccec8cbc4baf5122dd3df6a0cb5545514e not found: ID does not exist" Feb 02 17:56:43 crc kubenswrapper[4835]: I0202 17:56:43.198331 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" path="/var/lib/kubelet/pods/cd847e5f-9278-4280-a37a-808c1b2619a4/volumes" Feb 02 17:56:55 crc kubenswrapper[4835]: I0202 17:56:55.188903 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:56:55 crc kubenswrapper[4835]: E0202 17:56:55.189797 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:57:07 crc kubenswrapper[4835]: I0202 17:57:07.188608 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:57:07 crc kubenswrapper[4835]: E0202 17:57:07.190349 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:57:21 crc kubenswrapper[4835]: I0202 17:57:21.189427 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:57:21 crc kubenswrapper[4835]: E0202 17:57:21.190444 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:57:34 crc kubenswrapper[4835]: I0202 17:57:34.189215 4835 scope.go:117] "RemoveContainer" 
containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:57:34 crc kubenswrapper[4835]: E0202 17:57:34.190163 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:57:46 crc kubenswrapper[4835]: I0202 17:57:46.189158 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:57:46 crc kubenswrapper[4835]: E0202 17:57:46.190051 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:58:00 crc kubenswrapper[4835]: I0202 17:58:00.188527 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:58:00 crc kubenswrapper[4835]: E0202 17:58:00.189475 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:58:12 crc kubenswrapper[4835]: I0202 17:58:12.189099 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:58:12 crc kubenswrapper[4835]: E0202 17:58:12.189819 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:58:26 crc kubenswrapper[4835]: I0202 17:58:26.189227 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:58:26 crc kubenswrapper[4835]: E0202 17:58:26.189997 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:58:41 crc kubenswrapper[4835]: I0202 17:58:41.190053 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:58:41 crc kubenswrapper[4835]: E0202 17:58:41.190943 4835 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:58:53 crc kubenswrapper[4835]: I0202 17:58:53.188850 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:58:53 crc kubenswrapper[4835]: E0202 17:58:53.189551 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:59:04 crc kubenswrapper[4835]: I0202 17:59:04.188943 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:59:04 crc kubenswrapper[4835]: E0202 17:59:04.189759 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 17:59:16 crc kubenswrapper[4835]: I0202 17:59:16.189139 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 17:59:16 crc kubenswrapper[4835]: I0202 17:59:16.993305 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"3e5d19b73694e7c39075ed4bdb14bd108b2271b3a8b41aae29ed31ae501ef6a9"} Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.186428 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824"] Feb 02 18:00:00 crc kubenswrapper[4835]: E0202 18:00:00.187843 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerName="extract-content" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.187870 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerName="extract-content" Feb 02 18:00:00 crc kubenswrapper[4835]: E0202 18:00:00.187908 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerName="extract-utilities" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.187927 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerName="extract-utilities" Feb 02 18:00:00 crc kubenswrapper[4835]: E0202 18:00:00.187982 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerName="registry-server" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.187993 4835 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerName="registry-server" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.188249 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd847e5f-9278-4280-a37a-808c1b2619a4" containerName="registry-server" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.189437 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.191778 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.192065 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.203219 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824"] Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.358136 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b5f14c18-accf-4b44-81ca-95f814e125e0-secret-volume\") pod \"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.358202 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b5f14c18-accf-4b44-81ca-95f814e125e0-config-volume\") pod \"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.358666 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fhl6\" (UniqueName: \"kubernetes.io/projected/b5f14c18-accf-4b44-81ca-95f814e125e0-kube-api-access-2fhl6\") pod \"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.460646 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fhl6\" (UniqueName: \"kubernetes.io/projected/b5f14c18-accf-4b44-81ca-95f814e125e0-kube-api-access-2fhl6\") pod \"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.460772 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b5f14c18-accf-4b44-81ca-95f814e125e0-secret-volume\") pod \"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.460825 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b5f14c18-accf-4b44-81ca-95f814e125e0-config-volume\") pod 
\"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.461844 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b5f14c18-accf-4b44-81ca-95f814e125e0-config-volume\") pod \"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.953207 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b5f14c18-accf-4b44-81ca-95f814e125e0-secret-volume\") pod \"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:00 crc kubenswrapper[4835]: I0202 18:00:00.967393 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fhl6\" (UniqueName: \"kubernetes.io/projected/b5f14c18-accf-4b44-81ca-95f814e125e0-kube-api-access-2fhl6\") pod \"collect-profiles-29500920-zs824\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:01 crc kubenswrapper[4835]: I0202 18:00:01.118419 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:01 crc kubenswrapper[4835]: I0202 18:00:01.593655 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824"] Feb 02 18:00:02 crc kubenswrapper[4835]: I0202 18:00:02.396505 4835 generic.go:334] "Generic (PLEG): container finished" podID="b5f14c18-accf-4b44-81ca-95f814e125e0" containerID="829b55240a692c548d430e6019b5403deaf0852c9df42f4a1faf56d8719baca5" exitCode=0 Feb 02 18:00:02 crc kubenswrapper[4835]: I0202 18:00:02.396569 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" event={"ID":"b5f14c18-accf-4b44-81ca-95f814e125e0","Type":"ContainerDied","Data":"829b55240a692c548d430e6019b5403deaf0852c9df42f4a1faf56d8719baca5"} Feb 02 18:00:02 crc kubenswrapper[4835]: I0202 18:00:02.397082 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" event={"ID":"b5f14c18-accf-4b44-81ca-95f814e125e0","Type":"ContainerStarted","Data":"1e068ed75374aa419848ce521117e5eeb4a3d3be2d122cbbda2b0aa3684edeba"} Feb 02 18:00:03 crc kubenswrapper[4835]: I0202 18:00:03.929127 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.028498 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b5f14c18-accf-4b44-81ca-95f814e125e0-config-volume\") pod \"b5f14c18-accf-4b44-81ca-95f814e125e0\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.028562 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b5f14c18-accf-4b44-81ca-95f814e125e0-secret-volume\") pod \"b5f14c18-accf-4b44-81ca-95f814e125e0\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.028661 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fhl6\" (UniqueName: \"kubernetes.io/projected/b5f14c18-accf-4b44-81ca-95f814e125e0-kube-api-access-2fhl6\") pod \"b5f14c18-accf-4b44-81ca-95f814e125e0\" (UID: \"b5f14c18-accf-4b44-81ca-95f814e125e0\") " Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.029858 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5f14c18-accf-4b44-81ca-95f814e125e0-config-volume" (OuterVolumeSpecName: "config-volume") pod "b5f14c18-accf-4b44-81ca-95f814e125e0" (UID: "b5f14c18-accf-4b44-81ca-95f814e125e0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.035737 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5f14c18-accf-4b44-81ca-95f814e125e0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b5f14c18-accf-4b44-81ca-95f814e125e0" (UID: "b5f14c18-accf-4b44-81ca-95f814e125e0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.050960 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5f14c18-accf-4b44-81ca-95f814e125e0-kube-api-access-2fhl6" (OuterVolumeSpecName: "kube-api-access-2fhl6") pod "b5f14c18-accf-4b44-81ca-95f814e125e0" (UID: "b5f14c18-accf-4b44-81ca-95f814e125e0"). InnerVolumeSpecName "kube-api-access-2fhl6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.131718 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fhl6\" (UniqueName: \"kubernetes.io/projected/b5f14c18-accf-4b44-81ca-95f814e125e0-kube-api-access-2fhl6\") on node \"crc\" DevicePath \"\"" Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.132020 4835 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b5f14c18-accf-4b44-81ca-95f814e125e0-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.132093 4835 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b5f14c18-accf-4b44-81ca-95f814e125e0-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.416567 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" event={"ID":"b5f14c18-accf-4b44-81ca-95f814e125e0","Type":"ContainerDied","Data":"1e068ed75374aa419848ce521117e5eeb4a3d3be2d122cbbda2b0aa3684edeba"} Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.416606 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e068ed75374aa419848ce521117e5eeb4a3d3be2d122cbbda2b0aa3684edeba" Feb 02 18:00:04 crc kubenswrapper[4835]: I0202 18:00:04.416653 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500920-zs824" Feb 02 18:00:05 crc kubenswrapper[4835]: I0202 18:00:05.025335 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87"] Feb 02 18:00:05 crc kubenswrapper[4835]: I0202 18:00:05.034407 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500875-q5x87"] Feb 02 18:00:05 crc kubenswrapper[4835]: I0202 18:00:05.202141 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa1b6455-7b51-4d7d-8cb8-35115bba7b0f" path="/var/lib/kubelet/pods/aa1b6455-7b51-4d7d-8cb8-35115bba7b0f/volumes" Feb 02 18:00:15 crc kubenswrapper[4835]: I0202 18:00:15.180424 4835 scope.go:117] "RemoveContainer" containerID="53dc8d7746aad81ef72b772ba618ea4c01a3e1675f43a25160f5eb2774888a56" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.165629 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29500921-22pwk"] Feb 02 18:01:00 crc kubenswrapper[4835]: E0202 18:01:00.166528 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5f14c18-accf-4b44-81ca-95f814e125e0" containerName="collect-profiles" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.166542 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5f14c18-accf-4b44-81ca-95f814e125e0" containerName="collect-profiles" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.166716 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5f14c18-accf-4b44-81ca-95f814e125e0" containerName="collect-profiles" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.167493 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.178415 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29500921-22pwk"] Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.222139 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-combined-ca-bundle\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.223680 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-config-data\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.223760 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-fernet-keys\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.223936 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qgxw\" (UniqueName: \"kubernetes.io/projected/9d457835-0e10-405d-af73-9ef35d8f24b4-kube-api-access-5qgxw\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.326067 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qgxw\" (UniqueName: \"kubernetes.io/projected/9d457835-0e10-405d-af73-9ef35d8f24b4-kube-api-access-5qgxw\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.326147 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-combined-ca-bundle\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.326227 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-config-data\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.326267 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-fernet-keys\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.332503 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-fernet-keys\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.332706 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-config-data\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.335464 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-combined-ca-bundle\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.349368 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qgxw\" (UniqueName: \"kubernetes.io/projected/9d457835-0e10-405d-af73-9ef35d8f24b4-kube-api-access-5qgxw\") pod \"keystone-cron-29500921-22pwk\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.485027 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:00 crc kubenswrapper[4835]: I0202 18:01:00.965384 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29500921-22pwk"] Feb 02 18:01:01 crc kubenswrapper[4835]: I0202 18:01:01.914377 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29500921-22pwk" event={"ID":"9d457835-0e10-405d-af73-9ef35d8f24b4","Type":"ContainerStarted","Data":"c917826ed6743ccb2fdd5e0d7c2c423bd523729cfc44ea2578b40c728c490dcb"} Feb 02 18:01:01 crc kubenswrapper[4835]: I0202 18:01:01.915858 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29500921-22pwk" event={"ID":"9d457835-0e10-405d-af73-9ef35d8f24b4","Type":"ContainerStarted","Data":"9fe82aee00f7d95a2f8a6a70515c48c5c9cc2af248e3900d053ee56e0c8f7566"} Feb 02 18:01:01 crc kubenswrapper[4835]: I0202 18:01:01.932391 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29500921-22pwk" podStartSLOduration=1.9323714779999999 podStartE2EDuration="1.932371478s" podCreationTimestamp="2026-02-02 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 18:01:01.93064826 +0000 UTC m=+4253.552252340" watchObservedRunningTime="2026-02-02 18:01:01.932371478 +0000 UTC m=+4253.553975558" Feb 02 18:01:04 crc kubenswrapper[4835]: I0202 18:01:04.939689 4835 generic.go:334] "Generic (PLEG): container finished" podID="9d457835-0e10-405d-af73-9ef35d8f24b4" containerID="c917826ed6743ccb2fdd5e0d7c2c423bd523729cfc44ea2578b40c728c490dcb" exitCode=0 Feb 02 18:01:04 crc kubenswrapper[4835]: I0202 18:01:04.939785 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29500921-22pwk" event={"ID":"9d457835-0e10-405d-af73-9ef35d8f24b4","Type":"ContainerDied","Data":"c917826ed6743ccb2fdd5e0d7c2c423bd523729cfc44ea2578b40c728c490dcb"} Feb 02 18:01:06 crc 
kubenswrapper[4835]: I0202 18:01:06.349366 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.466008 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-config-data\") pod \"9d457835-0e10-405d-af73-9ef35d8f24b4\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.466070 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-combined-ca-bundle\") pod \"9d457835-0e10-405d-af73-9ef35d8f24b4\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.466187 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qgxw\" (UniqueName: \"kubernetes.io/projected/9d457835-0e10-405d-af73-9ef35d8f24b4-kube-api-access-5qgxw\") pod \"9d457835-0e10-405d-af73-9ef35d8f24b4\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.466323 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-fernet-keys\") pod \"9d457835-0e10-405d-af73-9ef35d8f24b4\" (UID: \"9d457835-0e10-405d-af73-9ef35d8f24b4\") " Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.472926 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9d457835-0e10-405d-af73-9ef35d8f24b4" (UID: "9d457835-0e10-405d-af73-9ef35d8f24b4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.475051 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d457835-0e10-405d-af73-9ef35d8f24b4-kube-api-access-5qgxw" (OuterVolumeSpecName: "kube-api-access-5qgxw") pod "9d457835-0e10-405d-af73-9ef35d8f24b4" (UID: "9d457835-0e10-405d-af73-9ef35d8f24b4"). InnerVolumeSpecName "kube-api-access-5qgxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.496330 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d457835-0e10-405d-af73-9ef35d8f24b4" (UID: "9d457835-0e10-405d-af73-9ef35d8f24b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.526621 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-config-data" (OuterVolumeSpecName: "config-data") pod "9d457835-0e10-405d-af73-9ef35d8f24b4" (UID: "9d457835-0e10-405d-af73-9ef35d8f24b4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.569186 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.569294 4835 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.569316 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qgxw\" (UniqueName: \"kubernetes.io/projected/9d457835-0e10-405d-af73-9ef35d8f24b4-kube-api-access-5qgxw\") on node \"crc\" DevicePath \"\"" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.569333 4835 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d457835-0e10-405d-af73-9ef35d8f24b4-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.956978 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29500921-22pwk" event={"ID":"9d457835-0e10-405d-af73-9ef35d8f24b4","Type":"ContainerDied","Data":"9fe82aee00f7d95a2f8a6a70515c48c5c9cc2af248e3900d053ee56e0c8f7566"} Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.957016 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9fe82aee00f7d95a2f8a6a70515c48c5c9cc2af248e3900d053ee56e0c8f7566" Feb 02 18:01:06 crc kubenswrapper[4835]: I0202 18:01:06.957051 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29500921-22pwk" Feb 02 18:01:44 crc kubenswrapper[4835]: I0202 18:01:44.870573 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:01:44 crc kubenswrapper[4835]: I0202 18:01:44.872543 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:02:11 crc kubenswrapper[4835]: I0202 18:02:11.517050 4835 generic.go:334] "Generic (PLEG): container finished" podID="0cfd7d28-c17f-4035-bd42-89b10e3c60eb" containerID="7db1f086ff62eb0e2c7cbb2960d3aa113927d0eac76c4774620f36beb67cae00" exitCode=0 Feb 02 18:02:11 crc kubenswrapper[4835]: I0202 18:02:11.517157 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"0cfd7d28-c17f-4035-bd42-89b10e3c60eb","Type":"ContainerDied","Data":"7db1f086ff62eb0e2c7cbb2960d3aa113927d0eac76c4774620f36beb67cae00"} Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.899602 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.977939 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.978065 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ssh-key\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.978124 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-temporary\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.978167 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config-secret\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.978196 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.978753 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.979106 4835 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:12 crc kubenswrapper[4835]: I0202 18:02:12.996574 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.020236 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.027940 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.049447 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.079976 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xssh5\" (UniqueName: \"kubernetes.io/projected/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-kube-api-access-xssh5\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.080116 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-config-data\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.080156 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-workdir\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.080190 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ca-certs\") pod \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\" (UID: \"0cfd7d28-c17f-4035-bd42-89b10e3c60eb\") " Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.080509 4835 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.080524 4835 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ssh-key\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.080533 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.080542 4835 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.083039 4835 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-config-data" (OuterVolumeSpecName: "config-data") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.087059 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-kube-api-access-xssh5" (OuterVolumeSpecName: "kube-api-access-xssh5") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "kube-api-access-xssh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.088229 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.102104 4835 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.119785 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "0cfd7d28-c17f-4035-bd42-89b10e3c60eb" (UID: "0cfd7d28-c17f-4035-bd42-89b10e3c60eb"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.182032 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xssh5\" (UniqueName: \"kubernetes.io/projected/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-kube-api-access-xssh5\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.182069 4835 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.182082 4835 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.182094 4835 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.182107 4835 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0cfd7d28-c17f-4035-bd42-89b10e3c60eb-ca-certs\") on node \"crc\" DevicePath \"\"" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.547245 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"0cfd7d28-c17f-4035-bd42-89b10e3c60eb","Type":"ContainerDied","Data":"6e464d6c4de3bf6231156f09bce6c58e3818a9387ba4a690d707c21effcab8f8"} Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.547631 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e464d6c4de3bf6231156f09bce6c58e3818a9387ba4a690d707c21effcab8f8" Feb 02 18:02:13 crc kubenswrapper[4835]: I0202 18:02:13.547503 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 02 18:02:14 crc kubenswrapper[4835]: I0202 18:02:14.870643 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:02:14 crc kubenswrapper[4835]: I0202 18:02:14.870711 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.349264 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 02 18:02:21 crc kubenswrapper[4835]: E0202 18:02:21.350549 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cfd7d28-c17f-4035-bd42-89b10e3c60eb" containerName="tempest-tests-tempest-tests-runner" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.350568 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cfd7d28-c17f-4035-bd42-89b10e3c60eb" containerName="tempest-tests-tempest-tests-runner" Feb 02 18:02:21 crc kubenswrapper[4835]: E0202 18:02:21.350631 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d457835-0e10-405d-af73-9ef35d8f24b4" containerName="keystone-cron" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.350642 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d457835-0e10-405d-af73-9ef35d8f24b4" containerName="keystone-cron" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.350973 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d457835-0e10-405d-af73-9ef35d8f24b4" containerName="keystone-cron" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.351022 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cfd7d28-c17f-4035-bd42-89b10e3c60eb" containerName="tempest-tests-tempest-tests-runner" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.352294 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.357515 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.366110 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-m7gcn" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.542390 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njchx\" (UniqueName: \"kubernetes.io/projected/538dadbd-5539-459e-9939-f078b6bdda38-kube-api-access-njchx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"538dadbd-5539-459e-9939-f078b6bdda38\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.542478 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"538dadbd-5539-459e-9939-f078b6bdda38\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.643716 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"538dadbd-5539-459e-9939-f078b6bdda38\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.643862 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njchx\" (UniqueName: \"kubernetes.io/projected/538dadbd-5539-459e-9939-f078b6bdda38-kube-api-access-njchx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"538dadbd-5539-459e-9939-f078b6bdda38\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.644142 4835 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"538dadbd-5539-459e-9939-f078b6bdda38\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.669583 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njchx\" (UniqueName: \"kubernetes.io/projected/538dadbd-5539-459e-9939-f078b6bdda38-kube-api-access-njchx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"538dadbd-5539-459e-9939-f078b6bdda38\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:21 crc kubenswrapper[4835]: I0202 18:02:21.671598 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"538dadbd-5539-459e-9939-f078b6bdda38\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:21 crc 
kubenswrapper[4835]: I0202 18:02:21.684797 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 18:02:22 crc kubenswrapper[4835]: I0202 18:02:22.127910 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 02 18:02:22 crc kubenswrapper[4835]: I0202 18:02:22.137783 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 18:02:22 crc kubenswrapper[4835]: I0202 18:02:22.629022 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"538dadbd-5539-459e-9939-f078b6bdda38","Type":"ContainerStarted","Data":"c0eac8a1f38a0b4eab5754a0b6cb4fe67c46d9b394398675bb0b77de526ee256"} Feb 02 18:02:23 crc kubenswrapper[4835]: I0202 18:02:23.640721 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"538dadbd-5539-459e-9939-f078b6bdda38","Type":"ContainerStarted","Data":"f7db58c73b5e03b396bdaf8a6dbd5438a44a3cd5024c8a2bd96f8f5d512456bd"} Feb 02 18:02:23 crc kubenswrapper[4835]: I0202 18:02:23.666357 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.8341058559999999 podStartE2EDuration="2.666329853s" podCreationTimestamp="2026-02-02 18:02:21 +0000 UTC" firstStartedPulling="2026-02-02 18:02:22.137525495 +0000 UTC m=+4333.759129595" lastFinishedPulling="2026-02-02 18:02:22.969749512 +0000 UTC m=+4334.591353592" observedRunningTime="2026-02-02 18:02:23.665819128 +0000 UTC m=+4335.287423208" watchObservedRunningTime="2026-02-02 18:02:23.666329853 +0000 UTC m=+4335.287933933" Feb 02 18:02:44 crc kubenswrapper[4835]: I0202 18:02:44.870863 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:02:44 crc kubenswrapper[4835]: I0202 18:02:44.871384 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:02:44 crc kubenswrapper[4835]: I0202 18:02:44.871427 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 18:02:44 crc kubenswrapper[4835]: I0202 18:02:44.872118 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3e5d19b73694e7c39075ed4bdb14bd108b2271b3a8b41aae29ed31ae501ef6a9"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 18:02:44 crc kubenswrapper[4835]: I0202 18:02:44.872167 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" 
containerID="cri-o://3e5d19b73694e7c39075ed4bdb14bd108b2271b3a8b41aae29ed31ae501ef6a9" gracePeriod=600 Feb 02 18:02:45 crc kubenswrapper[4835]: I0202 18:02:45.844454 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="3e5d19b73694e7c39075ed4bdb14bd108b2271b3a8b41aae29ed31ae501ef6a9" exitCode=0 Feb 02 18:02:45 crc kubenswrapper[4835]: I0202 18:02:45.844545 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"3e5d19b73694e7c39075ed4bdb14bd108b2271b3a8b41aae29ed31ae501ef6a9"} Feb 02 18:02:45 crc kubenswrapper[4835]: I0202 18:02:45.845070 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf"} Feb 02 18:02:45 crc kubenswrapper[4835]: I0202 18:02:45.845099 4835 scope.go:117] "RemoveContainer" containerID="27dcc667e61de047177a67e5164b103f8578ebafd9dd565788a6d87261f4583e" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.361894 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-t87f4/must-gather-x7xcb"] Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.364095 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.366793 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-t87f4"/"openshift-service-ca.crt" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.367148 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-t87f4"/"kube-root-ca.crt" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.367320 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-t87f4"/"default-dockercfg-2v6dv" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.374868 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-t87f4/must-gather-x7xcb"] Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.467432 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a699053-df9a-495a-83bb-7f1612f0e615-must-gather-output\") pod \"must-gather-x7xcb\" (UID: \"3a699053-df9a-495a-83bb-7f1612f0e615\") " pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.467479 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhdhn\" (UniqueName: \"kubernetes.io/projected/3a699053-df9a-495a-83bb-7f1612f0e615-kube-api-access-vhdhn\") pod \"must-gather-x7xcb\" (UID: \"3a699053-df9a-495a-83bb-7f1612f0e615\") " pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.569614 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a699053-df9a-495a-83bb-7f1612f0e615-must-gather-output\") pod \"must-gather-x7xcb\" (UID: \"3a699053-df9a-495a-83bb-7f1612f0e615\") " pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:02:47 crc 
kubenswrapper[4835]: I0202 18:02:47.569679 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhdhn\" (UniqueName: \"kubernetes.io/projected/3a699053-df9a-495a-83bb-7f1612f0e615-kube-api-access-vhdhn\") pod \"must-gather-x7xcb\" (UID: \"3a699053-df9a-495a-83bb-7f1612f0e615\") " pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.570217 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a699053-df9a-495a-83bb-7f1612f0e615-must-gather-output\") pod \"must-gather-x7xcb\" (UID: \"3a699053-df9a-495a-83bb-7f1612f0e615\") " pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.591048 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhdhn\" (UniqueName: \"kubernetes.io/projected/3a699053-df9a-495a-83bb-7f1612f0e615-kube-api-access-vhdhn\") pod \"must-gather-x7xcb\" (UID: \"3a699053-df9a-495a-83bb-7f1612f0e615\") " pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:02:47 crc kubenswrapper[4835]: I0202 18:02:47.690694 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:02:48 crc kubenswrapper[4835]: I0202 18:02:48.111775 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-t87f4/must-gather-x7xcb"] Feb 02 18:02:48 crc kubenswrapper[4835]: I0202 18:02:48.878392 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/must-gather-x7xcb" event={"ID":"3a699053-df9a-495a-83bb-7f1612f0e615","Type":"ContainerStarted","Data":"93f27967e5f387c7986168d8f7e8caa6a3d4b9644251271880967b814a6a9ea9"} Feb 02 18:02:53 crc kubenswrapper[4835]: I0202 18:02:53.922704 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/must-gather-x7xcb" event={"ID":"3a699053-df9a-495a-83bb-7f1612f0e615","Type":"ContainerStarted","Data":"335122e7f22ef4afa318583565477d9694243c921634eba058a35f7673fb4393"} Feb 02 18:02:53 crc kubenswrapper[4835]: I0202 18:02:53.923115 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/must-gather-x7xcb" event={"ID":"3a699053-df9a-495a-83bb-7f1612f0e615","Type":"ContainerStarted","Data":"441c57423a12074e541e5032a956e37cee4b3ba6a3e518d28986354b89486de0"} Feb 02 18:02:53 crc kubenswrapper[4835]: I0202 18:02:53.938520 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-t87f4/must-gather-x7xcb" podStartSLOduration=2.107395 podStartE2EDuration="6.93850639s" podCreationTimestamp="2026-02-02 18:02:47 +0000 UTC" firstStartedPulling="2026-02-02 18:02:48.114935787 +0000 UTC m=+4359.736539867" lastFinishedPulling="2026-02-02 18:02:52.946047177 +0000 UTC m=+4364.567651257" observedRunningTime="2026-02-02 18:02:53.935729811 +0000 UTC m=+4365.557333891" watchObservedRunningTime="2026-02-02 18:02:53.93850639 +0000 UTC m=+4365.560110470" Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.540837 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-t87f4/crc-debug-fxdg2"] Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.542811 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.692695 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5nbc\" (UniqueName: \"kubernetes.io/projected/f433514f-669d-4945-98ad-f3e0be2af89c-kube-api-access-z5nbc\") pod \"crc-debug-fxdg2\" (UID: \"f433514f-669d-4945-98ad-f3e0be2af89c\") " pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.692818 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f433514f-669d-4945-98ad-f3e0be2af89c-host\") pod \"crc-debug-fxdg2\" (UID: \"f433514f-669d-4945-98ad-f3e0be2af89c\") " pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.795056 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5nbc\" (UniqueName: \"kubernetes.io/projected/f433514f-669d-4945-98ad-f3e0be2af89c-kube-api-access-z5nbc\") pod \"crc-debug-fxdg2\" (UID: \"f433514f-669d-4945-98ad-f3e0be2af89c\") " pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.795196 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f433514f-669d-4945-98ad-f3e0be2af89c-host\") pod \"crc-debug-fxdg2\" (UID: \"f433514f-669d-4945-98ad-f3e0be2af89c\") " pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.795353 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f433514f-669d-4945-98ad-f3e0be2af89c-host\") pod \"crc-debug-fxdg2\" (UID: \"f433514f-669d-4945-98ad-f3e0be2af89c\") " pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.825295 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5nbc\" (UniqueName: \"kubernetes.io/projected/f433514f-669d-4945-98ad-f3e0be2af89c-kube-api-access-z5nbc\") pod \"crc-debug-fxdg2\" (UID: \"f433514f-669d-4945-98ad-f3e0be2af89c\") " pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.865263 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:02:58 crc kubenswrapper[4835]: W0202 18:02:58.904526 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf433514f_669d_4945_98ad_f3e0be2af89c.slice/crio-b3a82d219c5d0cbe43f14491e27d46a87eef1ec2a36b6bb544689a36f87e28e6 WatchSource:0}: Error finding container b3a82d219c5d0cbe43f14491e27d46a87eef1ec2a36b6bb544689a36f87e28e6: Status 404 returned error can't find the container with id b3a82d219c5d0cbe43f14491e27d46a87eef1ec2a36b6bb544689a36f87e28e6 Feb 02 18:02:58 crc kubenswrapper[4835]: I0202 18:02:58.973615 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/crc-debug-fxdg2" event={"ID":"f433514f-669d-4945-98ad-f3e0be2af89c","Type":"ContainerStarted","Data":"b3a82d219c5d0cbe43f14491e27d46a87eef1ec2a36b6bb544689a36f87e28e6"} Feb 02 18:03:11 crc kubenswrapper[4835]: I0202 18:03:11.082699 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/crc-debug-fxdg2" event={"ID":"f433514f-669d-4945-98ad-f3e0be2af89c","Type":"ContainerStarted","Data":"23055114035ca51f5a8ba9c5550a1ffc3abf3325cddd095369a94c8bdadb35da"} Feb 02 18:03:11 crc kubenswrapper[4835]: I0202 18:03:11.106200 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-t87f4/crc-debug-fxdg2" podStartSLOduration=1.856122591 podStartE2EDuration="13.106174947s" podCreationTimestamp="2026-02-02 18:02:58 +0000 UTC" firstStartedPulling="2026-02-02 18:02:58.906297025 +0000 UTC m=+4370.527901105" lastFinishedPulling="2026-02-02 18:03:10.156349381 +0000 UTC m=+4381.777953461" observedRunningTime="2026-02-02 18:03:11.103579804 +0000 UTC m=+4382.725183884" watchObservedRunningTime="2026-02-02 18:03:11.106174947 +0000 UTC m=+4382.727779027" Feb 02 18:04:05 crc kubenswrapper[4835]: I0202 18:04:05.524457 4835 generic.go:334] "Generic (PLEG): container finished" podID="f433514f-669d-4945-98ad-f3e0be2af89c" containerID="23055114035ca51f5a8ba9c5550a1ffc3abf3325cddd095369a94c8bdadb35da" exitCode=0 Feb 02 18:04:05 crc kubenswrapper[4835]: I0202 18:04:05.524554 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/crc-debug-fxdg2" event={"ID":"f433514f-669d-4945-98ad-f3e0be2af89c","Type":"ContainerDied","Data":"23055114035ca51f5a8ba9c5550a1ffc3abf3325cddd095369a94c8bdadb35da"} Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.642516 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.686487 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-t87f4/crc-debug-fxdg2"] Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.694953 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-t87f4/crc-debug-fxdg2"] Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.838506 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5nbc\" (UniqueName: \"kubernetes.io/projected/f433514f-669d-4945-98ad-f3e0be2af89c-kube-api-access-z5nbc\") pod \"f433514f-669d-4945-98ad-f3e0be2af89c\" (UID: \"f433514f-669d-4945-98ad-f3e0be2af89c\") " Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.838597 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f433514f-669d-4945-98ad-f3e0be2af89c-host\") pod \"f433514f-669d-4945-98ad-f3e0be2af89c\" (UID: \"f433514f-669d-4945-98ad-f3e0be2af89c\") " Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.838750 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f433514f-669d-4945-98ad-f3e0be2af89c-host" (OuterVolumeSpecName: "host") pod "f433514f-669d-4945-98ad-f3e0be2af89c" (UID: "f433514f-669d-4945-98ad-f3e0be2af89c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.838992 4835 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f433514f-669d-4945-98ad-f3e0be2af89c-host\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.843922 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f433514f-669d-4945-98ad-f3e0be2af89c-kube-api-access-z5nbc" (OuterVolumeSpecName: "kube-api-access-z5nbc") pod "f433514f-669d-4945-98ad-f3e0be2af89c" (UID: "f433514f-669d-4945-98ad-f3e0be2af89c"). InnerVolumeSpecName "kube-api-access-z5nbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:04:06 crc kubenswrapper[4835]: I0202 18:04:06.940322 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5nbc\" (UniqueName: \"kubernetes.io/projected/f433514f-669d-4945-98ad-f3e0be2af89c-kube-api-access-z5nbc\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.201052 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f433514f-669d-4945-98ad-f3e0be2af89c" path="/var/lib/kubelet/pods/f433514f-669d-4945-98ad-f3e0be2af89c/volumes" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.544458 4835 scope.go:117] "RemoveContainer" containerID="23055114035ca51f5a8ba9c5550a1ffc3abf3325cddd095369a94c8bdadb35da" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.544504 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-fxdg2" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.825092 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-t87f4/crc-debug-9479b"] Feb 02 18:04:07 crc kubenswrapper[4835]: E0202 18:04:07.826100 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f433514f-669d-4945-98ad-f3e0be2af89c" containerName="container-00" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.826214 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="f433514f-669d-4945-98ad-f3e0be2af89c" containerName="container-00" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.826482 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="f433514f-669d-4945-98ad-f3e0be2af89c" containerName="container-00" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.827222 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.861688 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6jr6\" (UniqueName: \"kubernetes.io/projected/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-kube-api-access-s6jr6\") pod \"crc-debug-9479b\" (UID: \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\") " pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.861984 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-host\") pod \"crc-debug-9479b\" (UID: \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\") " pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.963864 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-host\") pod \"crc-debug-9479b\" (UID: \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\") " pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.964025 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6jr6\" (UniqueName: \"kubernetes.io/projected/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-kube-api-access-s6jr6\") pod \"crc-debug-9479b\" (UID: \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\") " pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.964224 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-host\") pod \"crc-debug-9479b\" (UID: \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\") " pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:07 crc kubenswrapper[4835]: I0202 18:04:07.980942 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6jr6\" (UniqueName: \"kubernetes.io/projected/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-kube-api-access-s6jr6\") pod \"crc-debug-9479b\" (UID: \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\") " pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:08 crc kubenswrapper[4835]: I0202 18:04:08.142088 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:08 crc kubenswrapper[4835]: I0202 18:04:08.556084 4835 generic.go:334] "Generic (PLEG): container finished" podID="8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71" containerID="2b4f80a547d47e31da1087af82c95eef7b684c86f5e9183e24db3b894571d136" exitCode=0 Feb 02 18:04:08 crc kubenswrapper[4835]: I0202 18:04:08.556184 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/crc-debug-9479b" event={"ID":"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71","Type":"ContainerDied","Data":"2b4f80a547d47e31da1087af82c95eef7b684c86f5e9183e24db3b894571d136"} Feb 02 18:04:08 crc kubenswrapper[4835]: I0202 18:04:08.556479 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/crc-debug-9479b" event={"ID":"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71","Type":"ContainerStarted","Data":"2f112887ba6f978f869fa652f6a632d703b1c2205169ec8fd576a03284aa65e1"} Feb 02 18:04:09 crc kubenswrapper[4835]: I0202 18:04:09.670249 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:09 crc kubenswrapper[4835]: I0202 18:04:09.688304 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-host\") pod \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\" (UID: \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\") " Feb 02 18:04:09 crc kubenswrapper[4835]: I0202 18:04:09.688360 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6jr6\" (UniqueName: \"kubernetes.io/projected/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-kube-api-access-s6jr6\") pod \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\" (UID: \"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71\") " Feb 02 18:04:09 crc kubenswrapper[4835]: I0202 18:04:09.688423 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-host" (OuterVolumeSpecName: "host") pod "8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71" (UID: "8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 18:04:09 crc kubenswrapper[4835]: I0202 18:04:09.688693 4835 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-host\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:09 crc kubenswrapper[4835]: I0202 18:04:09.702285 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-kube-api-access-s6jr6" (OuterVolumeSpecName: "kube-api-access-s6jr6") pod "8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71" (UID: "8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71"). InnerVolumeSpecName "kube-api-access-s6jr6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:04:09 crc kubenswrapper[4835]: I0202 18:04:09.789627 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6jr6\" (UniqueName: \"kubernetes.io/projected/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71-kube-api-access-s6jr6\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:10 crc kubenswrapper[4835]: I0202 18:04:10.578691 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/crc-debug-9479b" event={"ID":"8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71","Type":"ContainerDied","Data":"2f112887ba6f978f869fa652f6a632d703b1c2205169ec8fd576a03284aa65e1"} Feb 02 18:04:10 crc kubenswrapper[4835]: I0202 18:04:10.579036 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f112887ba6f978f869fa652f6a632d703b1c2205169ec8fd576a03284aa65e1" Feb 02 18:04:10 crc kubenswrapper[4835]: I0202 18:04:10.578779 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-9479b" Feb 02 18:04:11 crc kubenswrapper[4835]: I0202 18:04:11.369757 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-t87f4/crc-debug-9479b"] Feb 02 18:04:11 crc kubenswrapper[4835]: I0202 18:04:11.378801 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-t87f4/crc-debug-9479b"] Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.534560 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-t87f4/crc-debug-km6hl"] Feb 02 18:04:12 crc kubenswrapper[4835]: E0202 18:04:12.535320 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71" containerName="container-00" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.535334 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71" containerName="container-00" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.535553 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71" containerName="container-00" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.536243 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.544902 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/367416ef-2834-402e-b704-d190da77154e-host\") pod \"crc-debug-km6hl\" (UID: \"367416ef-2834-402e-b704-d190da77154e\") " pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.544983 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktkd5\" (UniqueName: \"kubernetes.io/projected/367416ef-2834-402e-b704-d190da77154e-kube-api-access-ktkd5\") pod \"crc-debug-km6hl\" (UID: \"367416ef-2834-402e-b704-d190da77154e\") " pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.646738 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/367416ef-2834-402e-b704-d190da77154e-host\") pod \"crc-debug-km6hl\" (UID: \"367416ef-2834-402e-b704-d190da77154e\") " pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.646794 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktkd5\" (UniqueName: \"kubernetes.io/projected/367416ef-2834-402e-b704-d190da77154e-kube-api-access-ktkd5\") pod \"crc-debug-km6hl\" (UID: \"367416ef-2834-402e-b704-d190da77154e\") " pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.646895 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/367416ef-2834-402e-b704-d190da77154e-host\") pod \"crc-debug-km6hl\" (UID: \"367416ef-2834-402e-b704-d190da77154e\") " pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.675250 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktkd5\" (UniqueName: \"kubernetes.io/projected/367416ef-2834-402e-b704-d190da77154e-kube-api-access-ktkd5\") pod \"crc-debug-km6hl\" (UID: \"367416ef-2834-402e-b704-d190da77154e\") " pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:12 crc kubenswrapper[4835]: I0202 18:04:12.853426 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:13 crc kubenswrapper[4835]: I0202 18:04:13.201169 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71" path="/var/lib/kubelet/pods/8cc54ca4-6bad-48e3-8bfb-72ced2ed7a71/volumes" Feb 02 18:04:13 crc kubenswrapper[4835]: I0202 18:04:13.609241 4835 generic.go:334] "Generic (PLEG): container finished" podID="367416ef-2834-402e-b704-d190da77154e" containerID="f2eaeddf515c8d53953575addfc339d7d476d055e1ecdad3362c81953e91a5ea" exitCode=0 Feb 02 18:04:13 crc kubenswrapper[4835]: I0202 18:04:13.609332 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/crc-debug-km6hl" event={"ID":"367416ef-2834-402e-b704-d190da77154e","Type":"ContainerDied","Data":"f2eaeddf515c8d53953575addfc339d7d476d055e1ecdad3362c81953e91a5ea"} Feb 02 18:04:13 crc kubenswrapper[4835]: I0202 18:04:13.609403 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/crc-debug-km6hl" event={"ID":"367416ef-2834-402e-b704-d190da77154e","Type":"ContainerStarted","Data":"9850b12cc0c05e56842a732553d51da1ff6753bb30778556f82ad337ce91b08c"} Feb 02 18:04:13 crc kubenswrapper[4835]: I0202 18:04:13.655445 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-t87f4/crc-debug-km6hl"] Feb 02 18:04:13 crc kubenswrapper[4835]: I0202 18:04:13.663765 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-t87f4/crc-debug-km6hl"] Feb 02 18:04:14 crc kubenswrapper[4835]: I0202 18:04:14.716761 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:14 crc kubenswrapper[4835]: I0202 18:04:14.889732 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktkd5\" (UniqueName: \"kubernetes.io/projected/367416ef-2834-402e-b704-d190da77154e-kube-api-access-ktkd5\") pod \"367416ef-2834-402e-b704-d190da77154e\" (UID: \"367416ef-2834-402e-b704-d190da77154e\") " Feb 02 18:04:14 crc kubenswrapper[4835]: I0202 18:04:14.890027 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/367416ef-2834-402e-b704-d190da77154e-host\") pod \"367416ef-2834-402e-b704-d190da77154e\" (UID: \"367416ef-2834-402e-b704-d190da77154e\") " Feb 02 18:04:14 crc kubenswrapper[4835]: I0202 18:04:14.890159 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/367416ef-2834-402e-b704-d190da77154e-host" (OuterVolumeSpecName: "host") pod "367416ef-2834-402e-b704-d190da77154e" (UID: "367416ef-2834-402e-b704-d190da77154e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 18:04:14 crc kubenswrapper[4835]: I0202 18:04:14.890538 4835 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/367416ef-2834-402e-b704-d190da77154e-host\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:14 crc kubenswrapper[4835]: I0202 18:04:14.896162 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/367416ef-2834-402e-b704-d190da77154e-kube-api-access-ktkd5" (OuterVolumeSpecName: "kube-api-access-ktkd5") pod "367416ef-2834-402e-b704-d190da77154e" (UID: "367416ef-2834-402e-b704-d190da77154e"). InnerVolumeSpecName "kube-api-access-ktkd5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:04:14 crc kubenswrapper[4835]: I0202 18:04:14.993495 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktkd5\" (UniqueName: \"kubernetes.io/projected/367416ef-2834-402e-b704-d190da77154e-kube-api-access-ktkd5\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:15 crc kubenswrapper[4835]: I0202 18:04:15.198995 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="367416ef-2834-402e-b704-d190da77154e" path="/var/lib/kubelet/pods/367416ef-2834-402e-b704-d190da77154e/volumes" Feb 02 18:04:15 crc kubenswrapper[4835]: I0202 18:04:15.632249 4835 scope.go:117] "RemoveContainer" containerID="f2eaeddf515c8d53953575addfc339d7d476d055e1ecdad3362c81953e91a5ea" Feb 02 18:04:15 crc kubenswrapper[4835]: I0202 18:04:15.632283 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/crc-debug-km6hl" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.014492 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-txs9m"] Feb 02 18:04:17 crc kubenswrapper[4835]: E0202 18:04:17.015455 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="367416ef-2834-402e-b704-d190da77154e" containerName="container-00" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.015472 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="367416ef-2834-402e-b704-d190da77154e" containerName="container-00" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.015764 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="367416ef-2834-402e-b704-d190da77154e" containerName="container-00" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.017859 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.028436 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-txs9m"] Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.133583 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7q2x\" (UniqueName: \"kubernetes.io/projected/195f888d-99b4-4dd0-90f9-e8d1056f4af2-kube-api-access-p7q2x\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.133662 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-catalog-content\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.133747 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-utilities\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.235744 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7q2x\" (UniqueName: \"kubernetes.io/projected/195f888d-99b4-4dd0-90f9-e8d1056f4af2-kube-api-access-p7q2x\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.235839 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-catalog-content\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.235941 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-utilities\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.236405 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-catalog-content\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.236517 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-utilities\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.256395 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-p7q2x\" (UniqueName: \"kubernetes.io/projected/195f888d-99b4-4dd0-90f9-e8d1056f4af2-kube-api-access-p7q2x\") pod \"redhat-operators-txs9m\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.339833 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:17 crc kubenswrapper[4835]: I0202 18:04:17.946607 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-txs9m"] Feb 02 18:04:18 crc kubenswrapper[4835]: I0202 18:04:18.662124 4835 generic.go:334] "Generic (PLEG): container finished" podID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerID="237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499" exitCode=0 Feb 02 18:04:18 crc kubenswrapper[4835]: I0202 18:04:18.662167 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txs9m" event={"ID":"195f888d-99b4-4dd0-90f9-e8d1056f4af2","Type":"ContainerDied","Data":"237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499"} Feb 02 18:04:18 crc kubenswrapper[4835]: I0202 18:04:18.662407 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txs9m" event={"ID":"195f888d-99b4-4dd0-90f9-e8d1056f4af2","Type":"ContainerStarted","Data":"a5682d14a6bc00a74499b09aa107523849a268e13e068463a3f76652b182c7ee"} Feb 02 18:04:20 crc kubenswrapper[4835]: I0202 18:04:20.682031 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txs9m" event={"ID":"195f888d-99b4-4dd0-90f9-e8d1056f4af2","Type":"ContainerStarted","Data":"513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e"} Feb 02 18:04:23 crc kubenswrapper[4835]: I0202 18:04:23.714716 4835 generic.go:334] "Generic (PLEG): container finished" podID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerID="513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e" exitCode=0 Feb 02 18:04:23 crc kubenswrapper[4835]: I0202 18:04:23.714806 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txs9m" event={"ID":"195f888d-99b4-4dd0-90f9-e8d1056f4af2","Type":"ContainerDied","Data":"513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e"} Feb 02 18:04:25 crc kubenswrapper[4835]: I0202 18:04:25.737030 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txs9m" event={"ID":"195f888d-99b4-4dd0-90f9-e8d1056f4af2","Type":"ContainerStarted","Data":"c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004"} Feb 02 18:04:25 crc kubenswrapper[4835]: I0202 18:04:25.764912 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-txs9m" podStartSLOduration=3.627963576 podStartE2EDuration="9.764894573s" podCreationTimestamp="2026-02-02 18:04:16 +0000 UTC" firstStartedPulling="2026-02-02 18:04:18.663841916 +0000 UTC m=+4450.285445996" lastFinishedPulling="2026-02-02 18:04:24.800772913 +0000 UTC m=+4456.422376993" observedRunningTime="2026-02-02 18:04:25.755984381 +0000 UTC m=+4457.377588461" watchObservedRunningTime="2026-02-02 18:04:25.764894573 +0000 UTC m=+4457.386498653" Feb 02 18:04:27 crc kubenswrapper[4835]: I0202 18:04:27.340633 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-txs9m" 
Feb 02 18:04:27 crc kubenswrapper[4835]: I0202 18:04:27.340957 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:28 crc kubenswrapper[4835]: I0202 18:04:28.386817 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-txs9m" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="registry-server" probeResult="failure" output=< Feb 02 18:04:28 crc kubenswrapper[4835]: timeout: failed to connect service ":50051" within 1s Feb 02 18:04:28 crc kubenswrapper[4835]: > Feb 02 18:04:37 crc kubenswrapper[4835]: I0202 18:04:37.394417 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:37 crc kubenswrapper[4835]: I0202 18:04:37.447384 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:37 crc kubenswrapper[4835]: I0202 18:04:37.637147 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-txs9m"] Feb 02 18:04:38 crc kubenswrapper[4835]: I0202 18:04:38.854644 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-txs9m" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="registry-server" containerID="cri-o://c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004" gracePeriod=2 Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.826717 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.866515 4835 generic.go:334] "Generic (PLEG): container finished" podID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerID="c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004" exitCode=0 Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.866556 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txs9m" event={"ID":"195f888d-99b4-4dd0-90f9-e8d1056f4af2","Type":"ContainerDied","Data":"c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004"} Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.866585 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-txs9m" event={"ID":"195f888d-99b4-4dd0-90f9-e8d1056f4af2","Type":"ContainerDied","Data":"a5682d14a6bc00a74499b09aa107523849a268e13e068463a3f76652b182c7ee"} Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.866604 4835 scope.go:117] "RemoveContainer" containerID="c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.866761 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-txs9m" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.891086 4835 scope.go:117] "RemoveContainer" containerID="513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.923569 4835 scope.go:117] "RemoveContainer" containerID="237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.977137 4835 scope.go:117] "RemoveContainer" containerID="c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004" Feb 02 18:04:39 crc kubenswrapper[4835]: E0202 18:04:39.977924 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004\": container with ID starting with c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004 not found: ID does not exist" containerID="c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.977979 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004"} err="failed to get container status \"c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004\": rpc error: code = NotFound desc = could not find container \"c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004\": container with ID starting with c1144e714d2203b609e9dd30c2e6b0a7f179b4d16a1c7f6c883ebe5238905004 not found: ID does not exist" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.978010 4835 scope.go:117] "RemoveContainer" containerID="513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e" Feb 02 18:04:39 crc kubenswrapper[4835]: E0202 18:04:39.978329 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e\": container with ID starting with 513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e not found: ID does not exist" containerID="513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.978377 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e"} err="failed to get container status \"513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e\": rpc error: code = NotFound desc = could not find container \"513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e\": container with ID starting with 513dbac30ca56b4787f85141b797e07c2eaffc559b69a67d17ee17d2c081b17e not found: ID does not exist" Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.978399 4835 scope.go:117] "RemoveContainer" containerID="237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499" Feb 02 18:04:39 crc kubenswrapper[4835]: E0202 18:04:39.978653 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499\": container with ID starting with 237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499 not found: ID does not exist" containerID="237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499" 
Feb 02 18:04:39 crc kubenswrapper[4835]: I0202 18:04:39.978673 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499"} err="failed to get container status \"237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499\": rpc error: code = NotFound desc = could not find container \"237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499\": container with ID starting with 237c18d446783116f8c3ce8b965f21bed8bfde14a28f2b0efe172bb93a94d499 not found: ID does not exist" Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.009089 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7q2x\" (UniqueName: \"kubernetes.io/projected/195f888d-99b4-4dd0-90f9-e8d1056f4af2-kube-api-access-p7q2x\") pod \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.009245 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-catalog-content\") pod \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.009354 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-utilities\") pod \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\" (UID: \"195f888d-99b4-4dd0-90f9-e8d1056f4af2\") " Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.010458 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-utilities" (OuterVolumeSpecName: "utilities") pod "195f888d-99b4-4dd0-90f9-e8d1056f4af2" (UID: "195f888d-99b4-4dd0-90f9-e8d1056f4af2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.016918 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/195f888d-99b4-4dd0-90f9-e8d1056f4af2-kube-api-access-p7q2x" (OuterVolumeSpecName: "kube-api-access-p7q2x") pod "195f888d-99b4-4dd0-90f9-e8d1056f4af2" (UID: "195f888d-99b4-4dd0-90f9-e8d1056f4af2"). InnerVolumeSpecName "kube-api-access-p7q2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.112083 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7q2x\" (UniqueName: \"kubernetes.io/projected/195f888d-99b4-4dd0-90f9-e8d1056f4af2-kube-api-access-p7q2x\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.112109 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.139511 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "195f888d-99b4-4dd0-90f9-e8d1056f4af2" (UID: "195f888d-99b4-4dd0-90f9-e8d1056f4af2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.206297 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-txs9m"] Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.216963 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195f888d-99b4-4dd0-90f9-e8d1056f4af2-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 18:04:40 crc kubenswrapper[4835]: I0202 18:04:40.221042 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-txs9m"] Feb 02 18:04:41 crc kubenswrapper[4835]: I0202 18:04:41.201142 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" path="/var/lib/kubelet/pods/195f888d-99b4-4dd0-90f9-e8d1056f4af2/volumes" Feb 02 18:04:42 crc kubenswrapper[4835]: I0202 18:04:42.972355 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-788b5b9b58-9wmkc_7466e48d-b9d4-4a34-917c-5ddd649eaac9/barbican-api/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.192464 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6f74b59756-mvv58_34fd3f27-2fa7-4a00-8389-97ac4ce31e33/barbican-keystone-listener/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.227661 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-788b5b9b58-9wmkc_7466e48d-b9d4-4a34-917c-5ddd649eaac9/barbican-api-log/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.401233 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6f74b59756-mvv58_34fd3f27-2fa7-4a00-8389-97ac4ce31e33/barbican-keystone-listener-log/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.441900 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5c47cddbff-wsm2t_79900da5-f2b8-4e39-8a30-feefcfec5a04/barbican-worker/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.481811 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5c47cddbff-wsm2t_79900da5-f2b8-4e39-8a30-feefcfec5a04/barbican-worker-log/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.682069 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_05826cd2-708f-4ce4-bbfb-04a0e6206c12/ceilometer-central-agent/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.688770 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw_29d9c1e8-035d-485c-bbfa-2c0328468c6a/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.812748 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_05826cd2-708f-4ce4-bbfb-04a0e6206c12/ceilometer-notification-agent/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.837924 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_05826cd2-708f-4ce4-bbfb-04a0e6206c12/proxy-httpd/0.log" Feb 02 18:04:43 crc kubenswrapper[4835]: I0202 18:04:43.877098 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_05826cd2-708f-4ce4-bbfb-04a0e6206c12/sg-core/0.log" Feb 02 18:04:44 crc kubenswrapper[4835]: I0202 18:04:44.033624 4835 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7_7703d310-723f-40a8-bae2-d11570ea275b/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:44 crc kubenswrapper[4835]: I0202 18:04:44.087664 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8_644fa065-6ba2-4813-84c8-c8f3d8da2971/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:44 crc kubenswrapper[4835]: I0202 18:04:44.903562 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_99971416-88df-48dd-9e3a-91874214a8b6/cinder-api/0.log" Feb 02 18:04:44 crc kubenswrapper[4835]: I0202 18:04:44.938607 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_99971416-88df-48dd-9e3a-91874214a8b6/cinder-api-log/0.log" Feb 02 18:04:45 crc kubenswrapper[4835]: I0202 18:04:45.014602 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_ef67b40a-7472-4011-95ad-4713b23bf160/probe/0.log" Feb 02 18:04:45 crc kubenswrapper[4835]: I0202 18:04:45.323338 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9c6fe27c-e17a-4f0f-bc50-21b8d1b49081/cinder-scheduler/0.log" Feb 02 18:04:45 crc kubenswrapper[4835]: I0202 18:04:45.332815 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9c6fe27c-e17a-4f0f-bc50-21b8d1b49081/probe/0.log" Feb 02 18:04:45 crc kubenswrapper[4835]: I0202 18:04:45.590818 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_4b1fb0f8-db78-42d9-82e2-c0dcda0cd231/probe/0.log" Feb 02 18:04:45 crc kubenswrapper[4835]: I0202 18:04:45.860017 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-7shcq_87617dd5-12a8-49cc-867a-aa0f2d0db447/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:46 crc kubenswrapper[4835]: I0202 18:04:46.695378 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p_1125f088-790d-4b32-831f-970cba6dc015/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:46 crc kubenswrapper[4835]: I0202 18:04:46.753020 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_ef67b40a-7472-4011-95ad-4713b23bf160/cinder-backup/0.log" Feb 02 18:04:46 crc kubenswrapper[4835]: I0202 18:04:46.967465 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-zddfv_12c4e956-4456-4f8e-b802-1db95f550d51/init/0.log" Feb 02 18:04:47 crc kubenswrapper[4835]: I0202 18:04:47.164079 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-zddfv_12c4e956-4456-4f8e-b802-1db95f550d51/init/0.log" Feb 02 18:04:47 crc kubenswrapper[4835]: I0202 18:04:47.295225 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_0454a882-7982-44f7-8f83-3be157de886a/glance-httpd/0.log" Feb 02 18:04:47 crc kubenswrapper[4835]: I0202 18:04:47.346030 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-zddfv_12c4e956-4456-4f8e-b802-1db95f550d51/dnsmasq-dns/0.log" Feb 02 18:04:47 crc kubenswrapper[4835]: I0202 18:04:47.406534 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_0454a882-7982-44f7-8f83-3be157de886a/glance-log/0.log" Feb 02 18:04:47 crc kubenswrapper[4835]: I0202 18:04:47.591423 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_7f0c5f67-a208-4b73-9f8b-c924d61cdf9e/glance-log/0.log" Feb 02 18:04:47 crc kubenswrapper[4835]: I0202 18:04:47.659024 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_7f0c5f67-a208-4b73-9f8b-c924d61cdf9e/glance-httpd/0.log" Feb 02 18:04:47 crc kubenswrapper[4835]: I0202 18:04:47.960200 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f575cdbb6-2fppg_fec30fb3-23dc-4443-a90f-4fb8defb3a1f/horizon/0.log" Feb 02 18:04:48 crc kubenswrapper[4835]: I0202 18:04:48.011009 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz_58ba2cea-000b-458c-bb8f-c3f693512a30/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:48 crc kubenswrapper[4835]: I0202 18:04:48.147638 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f575cdbb6-2fppg_fec30fb3-23dc-4443-a90f-4fb8defb3a1f/horizon-log/0.log" Feb 02 18:04:48 crc kubenswrapper[4835]: I0202 18:04:48.234095 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-4zj54_c814521e-9a8e-41bd-8eb9-05990dbe267f/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:48 crc kubenswrapper[4835]: I0202 18:04:48.508607 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29500921-22pwk_9d457835-0e10-405d-af73-9ef35d8f24b4/keystone-cron/0.log" Feb 02 18:04:48 crc kubenswrapper[4835]: I0202 18:04:48.701439 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_dc0384ad-df86-4939-8c71-92aff217a691/kube-state-metrics/0.log" Feb 02 18:04:48 crc kubenswrapper[4835]: I0202 18:04:48.830043 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7_8ebc7011-6fd1-437b-90dc-38f23dc004f5/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:49 crc kubenswrapper[4835]: I0202 18:04:49.118532 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_3a4997a1-3860-46d1-ba9f-a81c6800aec9/manila-api-log/0.log" Feb 02 18:04:49 crc kubenswrapper[4835]: I0202 18:04:49.182960 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_3a4997a1-3860-46d1-ba9f-a81c6800aec9/manila-api/0.log" Feb 02 18:04:49 crc kubenswrapper[4835]: I0202 18:04:49.319022 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-577d94f4db-mdlkk_0a44fa56-f689-4268-9973-867224dc13ef/keystone-api/0.log" Feb 02 18:04:49 crc kubenswrapper[4835]: I0202 18:04:49.463624 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_4f49a042-6b94-4a36-8607-1eb164147d96/probe/0.log" Feb 02 18:04:49 crc kubenswrapper[4835]: I0202 18:04:49.548040 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_4f49a042-6b94-4a36-8607-1eb164147d96/manila-scheduler/0.log" Feb 02 18:04:49 crc kubenswrapper[4835]: I0202 18:04:49.604245 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_eb3ede9c-1564-450a-b0c5-034c5ff8d285/manila-share/0.log" 
Feb 02 18:04:49 crc kubenswrapper[4835]: I0202 18:04:49.688437 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_eb3ede9c-1564-450a-b0c5-034c5ff8d285/probe/0.log" Feb 02 18:04:50 crc kubenswrapper[4835]: I0202 18:04:50.220345 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-66f46bdd4f-5p4b9_91d3abaa-c52b-495d-b400-8d7ad6ad28e9/neutron-httpd/0.log" Feb 02 18:04:50 crc kubenswrapper[4835]: I0202 18:04:50.307362 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-66f46bdd4f-5p4b9_91d3abaa-c52b-495d-b400-8d7ad6ad28e9/neutron-api/0.log" Feb 02 18:04:50 crc kubenswrapper[4835]: I0202 18:04:50.416091 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx_cb4a4f3f-7bb3-498f-b54c-bf0471877ff5/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:51 crc kubenswrapper[4835]: I0202 18:04:51.161454 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_fb71347d-bf06-4685-809c-a20715adc072/nova-cell0-conductor-conductor/0.log" Feb 02 18:04:51 crc kubenswrapper[4835]: I0202 18:04:51.246695 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_25c6e2e4-4f57-49a6-a558-92106e3f4856/nova-api-log/0.log" Feb 02 18:04:51 crc kubenswrapper[4835]: I0202 18:04:51.594150 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_7572ca57-a7e7-4025-8688-de2e52ece174/nova-cell1-conductor-conductor/0.log" Feb 02 18:04:51 crc kubenswrapper[4835]: I0202 18:04:51.609179 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_25c6e2e4-4f57-49a6-a558-92106e3f4856/nova-api-api/0.log" Feb 02 18:04:51 crc kubenswrapper[4835]: I0202 18:04:51.970204 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7_224a86ad-9920-4e35-8470-e48d3af63934/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:51 crc kubenswrapper[4835]: I0202 18:04:51.975858 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_dde1c61e-1816-44bc-b1bc-9e1545987087/nova-cell1-novncproxy-novncproxy/0.log" Feb 02 18:04:52 crc kubenswrapper[4835]: I0202 18:04:52.357972 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b112a741-ef20-4e18-a161-01ed24d9b5da/nova-metadata-log/0.log" Feb 02 18:04:52 crc kubenswrapper[4835]: I0202 18:04:52.741838 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_a4594b86-bb25-4c6b-922e-ecc018bf4081/nova-scheduler-scheduler/0.log" Feb 02 18:04:52 crc kubenswrapper[4835]: I0202 18:04:52.898058 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b630cc52-70e1-4580-8d73-df2507194554/mysql-bootstrap/0.log" Feb 02 18:04:53 crc kubenswrapper[4835]: I0202 18:04:53.113912 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b630cc52-70e1-4580-8d73-df2507194554/mysql-bootstrap/0.log" Feb 02 18:04:53 crc kubenswrapper[4835]: I0202 18:04:53.125092 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b630cc52-70e1-4580-8d73-df2507194554/galera/0.log" Feb 02 18:04:53 crc kubenswrapper[4835]: I0202 18:04:53.378215 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_eec68dd7-cf6a-45a4-a036-19bcf050c892/mysql-bootstrap/0.log" Feb 02 18:04:53 crc kubenswrapper[4835]: I0202 18:04:53.591025 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_eec68dd7-cf6a-45a4-a036-19bcf050c892/mysql-bootstrap/0.log" Feb 02 18:04:53 crc kubenswrapper[4835]: I0202 18:04:53.601098 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_eec68dd7-cf6a-45a4-a036-19bcf050c892/galera/0.log" Feb 02 18:04:53 crc kubenswrapper[4835]: I0202 18:04:53.786431 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a73ab577-2970-4e91-bbde-344bd924ba2c/openstackclient/0.log" Feb 02 18:04:54 crc kubenswrapper[4835]: I0202 18:04:54.040581 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-hxh6p_e3608c64-7b50-4a57-a0ea-578164629872/ovn-controller/0.log" Feb 02 18:04:54 crc kubenswrapper[4835]: I0202 18:04:54.128147 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_4b1fb0f8-db78-42d9-82e2-c0dcda0cd231/cinder-volume/0.log" Feb 02 18:04:54 crc kubenswrapper[4835]: I0202 18:04:54.232843 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b112a741-ef20-4e18-a161-01ed24d9b5da/nova-metadata-metadata/0.log" Feb 02 18:04:54 crc kubenswrapper[4835]: I0202 18:04:54.242018 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-c6jjz_9ce87c37-0b7a-4a7a-b90f-f34aaa078035/openstack-network-exporter/0.log" Feb 02 18:04:54 crc kubenswrapper[4835]: I0202 18:04:54.629191 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-248s6_89395ae4-5378-4709-a8b2-5b412e709142/ovsdb-server-init/0.log" Feb 02 18:04:54 crc kubenswrapper[4835]: I0202 18:04:54.780388 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-248s6_89395ae4-5378-4709-a8b2-5b412e709142/ovsdb-server-init/0.log" Feb 02 18:04:54 crc kubenswrapper[4835]: I0202 18:04:54.780530 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-248s6_89395ae4-5378-4709-a8b2-5b412e709142/ovsdb-server/0.log" Feb 02 18:04:54 crc kubenswrapper[4835]: I0202 18:04:54.793636 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-248s6_89395ae4-5378-4709-a8b2-5b412e709142/ovs-vswitchd/0.log" Feb 02 18:04:55 crc kubenswrapper[4835]: I0202 18:04:55.318734 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a9b8ae61-599b-4f97-84a8-6af5a6e37e52/openstack-network-exporter/0.log" Feb 02 18:04:55 crc kubenswrapper[4835]: I0202 18:04:55.345179 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-5f2k5_51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:55 crc kubenswrapper[4835]: I0202 18:04:55.568370 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a9b8ae61-599b-4f97-84a8-6af5a6e37e52/ovn-northd/0.log" Feb 02 18:04:55 crc kubenswrapper[4835]: I0202 18:04:55.688244 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_4d2f7d44-7579-4cd7-867c-77a46a7296cc/openstack-network-exporter/0.log" Feb 02 18:04:55 crc kubenswrapper[4835]: I0202 18:04:55.704691 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_4d2f7d44-7579-4cd7-867c-77a46a7296cc/ovsdbserver-nb/0.log" Feb 02 18:04:55 crc kubenswrapper[4835]: I0202 18:04:55.846412 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_03234de4-e1af-4911-93b4-6da716177367/openstack-network-exporter/0.log" Feb 02 18:04:55 crc kubenswrapper[4835]: I0202 18:04:55.898221 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_03234de4-e1af-4911-93b4-6da716177367/ovsdbserver-sb/0.log" Feb 02 18:04:56 crc kubenswrapper[4835]: I0202 18:04:56.141643 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b8f544fd4-zp9bk_b37e6604-22e9-4e3d-8b9e-27ac0fccad12/placement-api/0.log" Feb 02 18:04:56 crc kubenswrapper[4835]: I0202 18:04:56.235607 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7014da0-d4d6-4279-9f39-e50a4bbcdda5/setup-container/0.log" Feb 02 18:04:56 crc kubenswrapper[4835]: I0202 18:04:56.274615 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b8f544fd4-zp9bk_b37e6604-22e9-4e3d-8b9e-27ac0fccad12/placement-log/0.log" Feb 02 18:04:56 crc kubenswrapper[4835]: I0202 18:04:56.394060 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7014da0-d4d6-4279-9f39-e50a4bbcdda5/setup-container/0.log" Feb 02 18:04:56 crc kubenswrapper[4835]: I0202 18:04:56.419027 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7014da0-d4d6-4279-9f39-e50a4bbcdda5/rabbitmq/0.log" Feb 02 18:04:56 crc kubenswrapper[4835]: I0202 18:04:56.510251 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450/setup-container/0.log" Feb 02 18:04:57 crc kubenswrapper[4835]: I0202 18:04:57.130545 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450/setup-container/0.log" Feb 02 18:04:57 crc kubenswrapper[4835]: I0202 18:04:57.186244 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj_7f86d2bc-c7cf-42c8-b62a-828961f9e880/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:57 crc kubenswrapper[4835]: I0202 18:04:57.196130 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450/rabbitmq/0.log" Feb 02 18:04:57 crc kubenswrapper[4835]: I0202 18:04:57.389540 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6_fbf199f3-f350-4171-ad1a-0eb83e623e22/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:57 crc kubenswrapper[4835]: I0202 18:04:57.460443 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-5227h_c6aee2d4-013e-4ac6-a7f0-f5f640c724ed/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:04:57 crc kubenswrapper[4835]: I0202 18:04:57.657803 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-jm7mm_ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2/ssh-known-hosts-edpm-deployment/0.log" Feb 02 18:04:57 crc kubenswrapper[4835]: I0202 18:04:57.761527 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_tempest-tests-tempest_0cfd7d28-c17f-4035-bd42-89b10e3c60eb/tempest-tests-tempest-tests-runner/0.log" Feb 02 18:04:57 crc kubenswrapper[4835]: I0202 18:04:57.873842 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_538dadbd-5539-459e-9939-f078b6bdda38/test-operator-logs-container/0.log" Feb 02 18:04:58 crc kubenswrapper[4835]: I0202 18:04:58.006463 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm_ae2d5259-fd96-4127-8bf9-ddba82deadf6/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:05:04 crc kubenswrapper[4835]: I0202 18:05:04.995793 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_a5b7fac0-4bb3-4138-9618-96bf25cbdde5/memcached/0.log" Feb 02 18:05:14 crc kubenswrapper[4835]: I0202 18:05:14.870760 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:05:14 crc kubenswrapper[4835]: I0202 18:05:14.871297 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:05:25 crc kubenswrapper[4835]: I0202 18:05:25.392051 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/util/0.log" Feb 02 18:05:25 crc kubenswrapper[4835]: I0202 18:05:25.749099 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/util/0.log" Feb 02 18:05:25 crc kubenswrapper[4835]: I0202 18:05:25.841740 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/pull/0.log" Feb 02 18:05:25 crc kubenswrapper[4835]: I0202 18:05:25.865389 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/pull/0.log" Feb 02 18:05:26 crc kubenswrapper[4835]: I0202 18:05:26.436855 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/util/0.log" Feb 02 18:05:26 crc kubenswrapper[4835]: I0202 18:05:26.437586 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/pull/0.log" Feb 02 18:05:26 crc kubenswrapper[4835]: I0202 18:05:26.469297 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/extract/0.log" Feb 02 18:05:26 crc kubenswrapper[4835]: I0202 18:05:26.798843 4835 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-fc589b45f-wlvlw_ccf3b51e-9298-4a5e-ad19-feac0a171056/manager/0.log" Feb 02 18:05:26 crc kubenswrapper[4835]: I0202 18:05:26.869491 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-8f4c5cb64-2smkw_2fa52615-07a4-47bc-8a7c-62565638964e/manager/0.log" Feb 02 18:05:27 crc kubenswrapper[4835]: I0202 18:05:27.114825 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5d77f4dbc9-l2d9w_051510bb-9754-4866-932d-53e8f209af3e/manager/0.log" Feb 02 18:05:27 crc kubenswrapper[4835]: I0202 18:05:27.148056 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-65dc6c8d9c-sf4fj_8d738981-de82-4d01-a295-b14401942841/manager/0.log" Feb 02 18:05:27 crc kubenswrapper[4835]: I0202 18:05:27.339685 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-dpqkl_bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf/manager/0.log" Feb 02 18:05:27 crc kubenswrapper[4835]: I0202 18:05:27.780618 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-87bd9d46f-5bvq9_668fc23c-0c08-4f7e-839d-6fbcf5f6554d/manager/0.log" Feb 02 18:05:27 crc kubenswrapper[4835]: I0202 18:05:27.945803 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-xx8fb_cb34a8e8-0047-450d-898b-56164cd6f8c3/manager/0.log" Feb 02 18:05:28 crc kubenswrapper[4835]: I0202 18:05:28.138326 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-64469b487f-xdk9w_487c0b98-8b52-47fd-84ff-6637b6d79c8c/manager/0.log" Feb 02 18:05:28 crc kubenswrapper[4835]: I0202 18:05:28.264022 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7775d87d9d-knb4g_60282c99-48f4-4c72-92d2-c92b6720bcf7/manager/0.log" Feb 02 18:05:28 crc kubenswrapper[4835]: I0202 18:05:28.333005 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-5968f45b79-bhwd9_867a5e63-f2c8-45fe-a65a-a8c3d11de2b3/manager/0.log" Feb 02 18:05:28 crc kubenswrapper[4835]: I0202 18:05:28.521362 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-mt62w_9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2/manager/0.log" Feb 02 18:05:28 crc kubenswrapper[4835]: I0202 18:05:28.640016 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-576995988b-pd7lc_a2b75f19-bcbe-4f09-9652-70f042d4bc29/manager/0.log" Feb 02 18:05:28 crc kubenswrapper[4835]: I0202 18:05:28.775745 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5644b66645-89lbp_ac2c47dc-967c-456e-affc-bb3c4ac5b6d0/manager/0.log" Feb 02 18:05:29 crc kubenswrapper[4835]: I0202 18:05:29.033925 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq_eee7ce8b-cbaf-48ff-80d8-92011b4a11fa/manager/0.log" Feb 02 18:05:29 crc kubenswrapper[4835]: I0202 18:05:29.301698 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-init-6d857fbf88-k28lf_2f7d609f-2d42-4252-912a-ccae13d46f7f/operator/0.log" Feb 02 18:05:29 crc kubenswrapper[4835]: I0202 18:05:29.518129 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-chdtk_4a0bb1dd-84ba-4d22-812d-b76e81c5b054/registry-server/0.log" Feb 02 18:05:29 crc kubenswrapper[4835]: I0202 18:05:29.633966 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-ccmdf_3b504454-3ebc-45b8-8e93-fcab1363ce3c/manager/0.log" Feb 02 18:05:29 crc kubenswrapper[4835]: I0202 18:05:29.833471 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-kjlbl_af6dd5c2-faa4-407d-b6bc-fffda146240b/manager/0.log" Feb 02 18:05:30 crc kubenswrapper[4835]: I0202 18:05:30.044694 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-wdspf_dc9b2536-2284-4bd8-b803-e6dc90e30016/operator/0.log" Feb 02 18:05:30 crc kubenswrapper[4835]: I0202 18:05:30.255434 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7b89fdf75b-lrq25_563720e1-311a-4aea-b34b-e6ab1d5d7f44/manager/0.log" Feb 02 18:05:30 crc kubenswrapper[4835]: I0202 18:05:30.560797 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-2klm7_eb7ea012-63e3-4108-bb3b-904fd21a7c4c/manager/0.log" Feb 02 18:05:30 crc kubenswrapper[4835]: I0202 18:05:30.576185 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-565849b54-r2xwk_affef4c6-1369-40e7-882d-e0cc06c7a492/manager/0.log" Feb 02 18:05:30 crc kubenswrapper[4835]: I0202 18:05:30.803435 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-586b95b788-rrg8c_83fe7277-43df-4e53-b2e1-20ec1c340289/manager/0.log" Feb 02 18:05:30 crc kubenswrapper[4835]: I0202 18:05:30.981765 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7777b795b7-9mpk4_5166e3f9-91d3-4a6a-a4af-68e5063aa217/manager/0.log" Feb 02 18:05:31 crc kubenswrapper[4835]: I0202 18:05:31.078702 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b89ddb58-vl7xb_c64313f5-c2dc-4a80-aee6-4c177172598f/manager/0.log" Feb 02 18:05:44 crc kubenswrapper[4835]: I0202 18:05:44.869637 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:05:44 crc kubenswrapper[4835]: I0202 18:05:44.870184 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:05:50 crc kubenswrapper[4835]: I0202 18:05:50.897159 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-txgvr_acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f/control-plane-machine-set-operator/0.log" Feb 02 18:05:51 crc kubenswrapper[4835]: I0202 18:05:51.107217 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-44vfm_67a8f4cf-ff9c-48ab-92dd-b2e096ab4192/machine-api-operator/0.log" Feb 02 18:05:51 crc kubenswrapper[4835]: I0202 18:05:51.118527 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-44vfm_67a8f4cf-ff9c-48ab-92dd-b2e096ab4192/kube-rbac-proxy/0.log" Feb 02 18:06:05 crc kubenswrapper[4835]: I0202 18:06:05.607724 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-rvjmd_65887296-1b4f-40f4-80f1-9889e34070cc/cert-manager-controller/0.log" Feb 02 18:06:05 crc kubenswrapper[4835]: I0202 18:06:05.759941 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-h5kzj_2d062425-7c9e-48fe-a566-bf101b0349cc/cert-manager-cainjector/0.log" Feb 02 18:06:06 crc kubenswrapper[4835]: I0202 18:06:06.019439 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-jjmjw_1f888664-2f9b-4bd3-bef9-dd8b65a2ab93/cert-manager-webhook/0.log" Feb 02 18:06:14 crc kubenswrapper[4835]: I0202 18:06:14.870626 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:06:14 crc kubenswrapper[4835]: I0202 18:06:14.871479 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:06:14 crc kubenswrapper[4835]: I0202 18:06:14.871528 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 18:06:14 crc kubenswrapper[4835]: I0202 18:06:14.872205 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 18:06:14 crc kubenswrapper[4835]: I0202 18:06:14.872259 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" gracePeriod=600 Feb 02 18:06:15 crc kubenswrapper[4835]: E0202 18:06:15.021689 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:06:15 crc kubenswrapper[4835]: I0202 18:06:15.664703 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" exitCode=0 Feb 02 18:06:15 crc kubenswrapper[4835]: I0202 18:06:15.664745 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf"} Feb 02 18:06:15 crc kubenswrapper[4835]: I0202 18:06:15.665057 4835 scope.go:117] "RemoveContainer" containerID="3e5d19b73694e7c39075ed4bdb14bd108b2271b3a8b41aae29ed31ae501ef6a9" Feb 02 18:06:15 crc kubenswrapper[4835]: I0202 18:06:15.666429 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:06:15 crc kubenswrapper[4835]: E0202 18:06:15.669329 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:06:19 crc kubenswrapper[4835]: I0202 18:06:19.973358 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-r4xs8_3d3a7f96-7388-4e16-991c-6e99de2387dc/nmstate-console-plugin/0.log" Feb 02 18:06:20 crc kubenswrapper[4835]: I0202 18:06:20.184148 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-9bwnb_b5dbad86-74ef-402c-b0ab-5b48d69e8ecc/nmstate-handler/0.log" Feb 02 18:06:20 crc kubenswrapper[4835]: I0202 18:06:20.243028 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-c5rjp_c615d857-c500-4fe2-b699-97a5d8ce3311/kube-rbac-proxy/0.log" Feb 02 18:06:20 crc kubenswrapper[4835]: I0202 18:06:20.258462 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-c5rjp_c615d857-c500-4fe2-b699-97a5d8ce3311/nmstate-metrics/0.log" Feb 02 18:06:20 crc kubenswrapper[4835]: I0202 18:06:20.426632 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-ck9dl_8abbe167-63ff-48da-ad70-f298a68aab19/nmstate-operator/0.log" Feb 02 18:06:20 crc kubenswrapper[4835]: I0202 18:06:20.476162 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-44zt5_9b89aa5a-f847-42b1-a763-3bdcbcde8158/nmstate-webhook/0.log" Feb 02 18:06:29 crc kubenswrapper[4835]: I0202 18:06:29.194292 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:06:29 crc kubenswrapper[4835]: E0202 18:06:29.195228 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.718652 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9jwjv"] Feb 02 18:06:31 crc kubenswrapper[4835]: E0202 18:06:31.719651 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="extract-utilities" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.719669 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="extract-utilities" Feb 02 18:06:31 crc kubenswrapper[4835]: E0202 18:06:31.719703 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="extract-content" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.719712 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="extract-content" Feb 02 18:06:31 crc kubenswrapper[4835]: E0202 18:06:31.719727 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="registry-server" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.719735 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="registry-server" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.719946 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="195f888d-99b4-4dd0-90f9-e8d1056f4af2" containerName="registry-server" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.721766 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.729887 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9jwjv"] Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.819127 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c42l7\" (UniqueName: \"kubernetes.io/projected/b935024b-d9f0-4bbf-b6b8-8b17deabc106-kube-api-access-c42l7\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.819232 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-catalog-content\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.819557 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-utilities\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.921674 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c42l7\" (UniqueName: \"kubernetes.io/projected/b935024b-d9f0-4bbf-b6b8-8b17deabc106-kube-api-access-c42l7\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.921798 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-catalog-content\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.921896 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-utilities\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.922406 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-utilities\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.923000 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-catalog-content\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:31 crc kubenswrapper[4835]: I0202 18:06:31.951434 4835 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-c42l7\" (UniqueName: \"kubernetes.io/projected/b935024b-d9f0-4bbf-b6b8-8b17deabc106-kube-api-access-c42l7\") pod \"redhat-marketplace-9jwjv\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:32 crc kubenswrapper[4835]: I0202 18:06:32.042140 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:32 crc kubenswrapper[4835]: I0202 18:06:32.535854 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9jwjv"] Feb 02 18:06:32 crc kubenswrapper[4835]: I0202 18:06:32.804857 4835 generic.go:334] "Generic (PLEG): container finished" podID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerID="eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c" exitCode=0 Feb 02 18:06:32 crc kubenswrapper[4835]: I0202 18:06:32.804905 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9jwjv" event={"ID":"b935024b-d9f0-4bbf-b6b8-8b17deabc106","Type":"ContainerDied","Data":"eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c"} Feb 02 18:06:32 crc kubenswrapper[4835]: I0202 18:06:32.804973 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9jwjv" event={"ID":"b935024b-d9f0-4bbf-b6b8-8b17deabc106","Type":"ContainerStarted","Data":"e84d5e3da17f0b66fb7f77eb40f56e699fb1fec8cdcd735351c836e9977a30e0"} Feb 02 18:06:33 crc kubenswrapper[4835]: I0202 18:06:33.815000 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9jwjv" event={"ID":"b935024b-d9f0-4bbf-b6b8-8b17deabc106","Type":"ContainerStarted","Data":"673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51"} Feb 02 18:06:34 crc kubenswrapper[4835]: I0202 18:06:34.824145 4835 generic.go:334] "Generic (PLEG): container finished" podID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerID="673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51" exitCode=0 Feb 02 18:06:34 crc kubenswrapper[4835]: I0202 18:06:34.824328 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9jwjv" event={"ID":"b935024b-d9f0-4bbf-b6b8-8b17deabc106","Type":"ContainerDied","Data":"673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51"} Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.700762 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zjr47"] Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.702887 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.717770 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zjr47"] Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.794528 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmj64\" (UniqueName: \"kubernetes.io/projected/69938eab-61e8-4f89-b991-b15e3748a59b-kube-api-access-pmj64\") pod \"community-operators-zjr47\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.794599 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-utilities\") pod \"community-operators-zjr47\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.794720 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-catalog-content\") pod \"community-operators-zjr47\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.844049 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9jwjv" event={"ID":"b935024b-d9f0-4bbf-b6b8-8b17deabc106","Type":"ContainerStarted","Data":"00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1"} Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.865513 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9jwjv" podStartSLOduration=2.442173252 podStartE2EDuration="4.865491446s" podCreationTimestamp="2026-02-02 18:06:31 +0000 UTC" firstStartedPulling="2026-02-02 18:06:32.806456176 +0000 UTC m=+4584.428060256" lastFinishedPulling="2026-02-02 18:06:35.22977437 +0000 UTC m=+4586.851378450" observedRunningTime="2026-02-02 18:06:35.864371844 +0000 UTC m=+4587.485975944" watchObservedRunningTime="2026-02-02 18:06:35.865491446 +0000 UTC m=+4587.487095526" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.896482 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmj64\" (UniqueName: \"kubernetes.io/projected/69938eab-61e8-4f89-b991-b15e3748a59b-kube-api-access-pmj64\") pod \"community-operators-zjr47\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.896761 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-utilities\") pod \"community-operators-zjr47\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.896978 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-catalog-content\") pod \"community-operators-zjr47\" (UID: 
\"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.897809 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-catalog-content\") pod \"community-operators-zjr47\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.898176 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-utilities\") pod \"community-operators-zjr47\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:35 crc kubenswrapper[4835]: I0202 18:06:35.924116 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmj64\" (UniqueName: \"kubernetes.io/projected/69938eab-61e8-4f89-b991-b15e3748a59b-kube-api-access-pmj64\") pod \"community-operators-zjr47\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:36 crc kubenswrapper[4835]: I0202 18:06:36.044074 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:36 crc kubenswrapper[4835]: I0202 18:06:36.854199 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zjr47"] Feb 02 18:06:36 crc kubenswrapper[4835]: W0202 18:06:36.862314 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69938eab_61e8_4f89_b991_b15e3748a59b.slice/crio-8f3bb0ac05de86311c0ee4ffce1b2f03a0cea12f207844a0841ae12329caca08 WatchSource:0}: Error finding container 8f3bb0ac05de86311c0ee4ffce1b2f03a0cea12f207844a0841ae12329caca08: Status 404 returned error can't find the container with id 8f3bb0ac05de86311c0ee4ffce1b2f03a0cea12f207844a0841ae12329caca08 Feb 02 18:06:37 crc kubenswrapper[4835]: I0202 18:06:37.860388 4835 generic.go:334] "Generic (PLEG): container finished" podID="69938eab-61e8-4f89-b991-b15e3748a59b" containerID="17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045" exitCode=0 Feb 02 18:06:37 crc kubenswrapper[4835]: I0202 18:06:37.860496 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjr47" event={"ID":"69938eab-61e8-4f89-b991-b15e3748a59b","Type":"ContainerDied","Data":"17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045"} Feb 02 18:06:37 crc kubenswrapper[4835]: I0202 18:06:37.860690 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjr47" event={"ID":"69938eab-61e8-4f89-b991-b15e3748a59b","Type":"ContainerStarted","Data":"8f3bb0ac05de86311c0ee4ffce1b2f03a0cea12f207844a0841ae12329caca08"} Feb 02 18:06:38 crc kubenswrapper[4835]: I0202 18:06:38.870779 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjr47" event={"ID":"69938eab-61e8-4f89-b991-b15e3748a59b","Type":"ContainerStarted","Data":"b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb"} Feb 02 18:06:40 crc kubenswrapper[4835]: I0202 18:06:40.890171 4835 generic.go:334] "Generic (PLEG): container finished" 
podID="69938eab-61e8-4f89-b991-b15e3748a59b" containerID="b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb" exitCode=0 Feb 02 18:06:40 crc kubenswrapper[4835]: I0202 18:06:40.890260 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjr47" event={"ID":"69938eab-61e8-4f89-b991-b15e3748a59b","Type":"ContainerDied","Data":"b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb"} Feb 02 18:06:41 crc kubenswrapper[4835]: I0202 18:06:41.901592 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjr47" event={"ID":"69938eab-61e8-4f89-b991-b15e3748a59b","Type":"ContainerStarted","Data":"90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84"} Feb 02 18:06:42 crc kubenswrapper[4835]: I0202 18:06:42.042765 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:42 crc kubenswrapper[4835]: I0202 18:06:42.043787 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:42 crc kubenswrapper[4835]: I0202 18:06:42.092882 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:42 crc kubenswrapper[4835]: I0202 18:06:42.122153 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zjr47" podStartSLOduration=3.44596384 podStartE2EDuration="7.122131629s" podCreationTimestamp="2026-02-02 18:06:35 +0000 UTC" firstStartedPulling="2026-02-02 18:06:37.86372996 +0000 UTC m=+4589.485334040" lastFinishedPulling="2026-02-02 18:06:41.539897749 +0000 UTC m=+4593.161501829" observedRunningTime="2026-02-02 18:06:41.921630701 +0000 UTC m=+4593.543234781" watchObservedRunningTime="2026-02-02 18:06:42.122131629 +0000 UTC m=+4593.743735729" Feb 02 18:06:42 crc kubenswrapper[4835]: I0202 18:06:42.959012 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:43 crc kubenswrapper[4835]: I0202 18:06:43.190887 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:06:43 crc kubenswrapper[4835]: E0202 18:06:43.191113 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:06:43 crc kubenswrapper[4835]: I0202 18:06:43.291404 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9jwjv"] Feb 02 18:06:44 crc kubenswrapper[4835]: I0202 18:06:44.924188 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9jwjv" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerName="registry-server" containerID="cri-o://00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1" gracePeriod=2 Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.390853 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.517295 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-catalog-content\") pod \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.525361 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-utilities\") pod \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.525451 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c42l7\" (UniqueName: \"kubernetes.io/projected/b935024b-d9f0-4bbf-b6b8-8b17deabc106-kube-api-access-c42l7\") pod \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\" (UID: \"b935024b-d9f0-4bbf-b6b8-8b17deabc106\") " Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.526162 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-utilities" (OuterVolumeSpecName: "utilities") pod "b935024b-d9f0-4bbf-b6b8-8b17deabc106" (UID: "b935024b-d9f0-4bbf-b6b8-8b17deabc106"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.526289 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.531423 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b935024b-d9f0-4bbf-b6b8-8b17deabc106-kube-api-access-c42l7" (OuterVolumeSpecName: "kube-api-access-c42l7") pod "b935024b-d9f0-4bbf-b6b8-8b17deabc106" (UID: "b935024b-d9f0-4bbf-b6b8-8b17deabc106"). InnerVolumeSpecName "kube-api-access-c42l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.539207 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b935024b-d9f0-4bbf-b6b8-8b17deabc106" (UID: "b935024b-d9f0-4bbf-b6b8-8b17deabc106"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.628092 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c42l7\" (UniqueName: \"kubernetes.io/projected/b935024b-d9f0-4bbf-b6b8-8b17deabc106-kube-api-access-c42l7\") on node \"crc\" DevicePath \"\"" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.628133 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b935024b-d9f0-4bbf-b6b8-8b17deabc106-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.934853 4835 generic.go:334] "Generic (PLEG): container finished" podID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerID="00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1" exitCode=0 Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.934898 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9jwjv" event={"ID":"b935024b-d9f0-4bbf-b6b8-8b17deabc106","Type":"ContainerDied","Data":"00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1"} Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.934925 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9jwjv" event={"ID":"b935024b-d9f0-4bbf-b6b8-8b17deabc106","Type":"ContainerDied","Data":"e84d5e3da17f0b66fb7f77eb40f56e699fb1fec8cdcd735351c836e9977a30e0"} Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.934943 4835 scope.go:117] "RemoveContainer" containerID="00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.935085 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9jwjv" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.959131 4835 scope.go:117] "RemoveContainer" containerID="673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51" Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.973152 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9jwjv"] Feb 02 18:06:45 crc kubenswrapper[4835]: I0202 18:06:45.981692 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9jwjv"] Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.045414 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.045544 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.067266 4835 scope.go:117] "RemoveContainer" containerID="eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.111138 4835 scope.go:117] "RemoveContainer" containerID="00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1" Feb 02 18:06:46 crc kubenswrapper[4835]: E0202 18:06:46.111544 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1\": container with ID starting with 00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1 not found: ID does not exist" containerID="00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.111585 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1"} err="failed to get container status \"00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1\": rpc error: code = NotFound desc = could not find container \"00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1\": container with ID starting with 00087d1fab1c8a7b3ed303f4f20e878e615c28cf25a74e81c34e22badeba87e1 not found: ID does not exist" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.111611 4835 scope.go:117] "RemoveContainer" containerID="673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51" Feb 02 18:06:46 crc kubenswrapper[4835]: E0202 18:06:46.111901 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51\": container with ID starting with 673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51 not found: ID does not exist" containerID="673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.111930 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51"} err="failed to get container status \"673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51\": rpc error: code = NotFound desc = could not find container \"673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51\": container with ID starting with 
673ff0dd7387154bad37a3890b46e105c461a646004571925444ee489fd3df51 not found: ID does not exist" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.111948 4835 scope.go:117] "RemoveContainer" containerID="eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c" Feb 02 18:06:46 crc kubenswrapper[4835]: E0202 18:06:46.112195 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c\": container with ID starting with eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c not found: ID does not exist" containerID="eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.112219 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c"} err="failed to get container status \"eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c\": rpc error: code = NotFound desc = could not find container \"eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c\": container with ID starting with eebee6379c659b6ab0a042d44dcb18a255f65c585f9fc50b741c782e68b81f1c not found: ID does not exist" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.207539 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:46 crc kubenswrapper[4835]: I0202 18:06:46.994512 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:47 crc kubenswrapper[4835]: I0202 18:06:47.198674 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" path="/var/lib/kubelet/pods/b935024b-d9f0-4bbf-b6b8-8b17deabc106/volumes" Feb 02 18:06:48 crc kubenswrapper[4835]: I0202 18:06:48.502477 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zjr47"] Feb 02 18:06:49 crc kubenswrapper[4835]: I0202 18:06:49.967566 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zjr47" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" containerName="registry-server" containerID="cri-o://90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84" gracePeriod=2 Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.418505 4835 util.go:48] "No ready sandbox for pod can be found. 
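Each "could not find container" error in this stretch is raised for a container ID that the kubelet had already reported as ContainerDied and whose pod had just been removed, which suggests post-deletion cleanup noise rather than a real failure; the same burst repeats below for community-operators-zjr47 and certified-operators-j7rvh. One way to confirm that reading across the whole file is to cross-check the IDs, as in this rough sketch (assuming Python, a local copy of this log saved as kubelet.log, and one journal entry per line):

    import re

    died, unexplained = set(), []
    with open("kubelet.log") as fh:              # hypothetical local copy of this log
        for line in fh:
            m = re.search(r'"Type":"ContainerDied","Data":"([0-9a-f]{64})"', line)
            if m:
                died.add(m.group(1))
            m = re.search(r'"ContainerStatus from runtime service failed".*containerID="([0-9a-f]{64})"', line)
            if m and m.group(1) not in died:
                unexplained.append(m.group(1))   # NotFound for an ID never seen exiting

    # An empty list means every NotFound was raised for a container the kubelet had
    # already watched exit, i.e. expected noise from post-deletion cleanup.
    print(unexplained)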
Need to start a new one" pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.520772 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-catalog-content\") pod \"69938eab-61e8-4f89-b991-b15e3748a59b\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.520942 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-utilities\") pod \"69938eab-61e8-4f89-b991-b15e3748a59b\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.520992 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmj64\" (UniqueName: \"kubernetes.io/projected/69938eab-61e8-4f89-b991-b15e3748a59b-kube-api-access-pmj64\") pod \"69938eab-61e8-4f89-b991-b15e3748a59b\" (UID: \"69938eab-61e8-4f89-b991-b15e3748a59b\") " Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.523510 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-utilities" (OuterVolumeSpecName: "utilities") pod "69938eab-61e8-4f89-b991-b15e3748a59b" (UID: "69938eab-61e8-4f89-b991-b15e3748a59b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.528258 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69938eab-61e8-4f89-b991-b15e3748a59b-kube-api-access-pmj64" (OuterVolumeSpecName: "kube-api-access-pmj64") pod "69938eab-61e8-4f89-b991-b15e3748a59b" (UID: "69938eab-61e8-4f89-b991-b15e3748a59b"). InnerVolumeSpecName "kube-api-access-pmj64". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.591272 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69938eab-61e8-4f89-b991-b15e3748a59b" (UID: "69938eab-61e8-4f89-b991-b15e3748a59b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.622969 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.623013 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69938eab-61e8-4f89-b991-b15e3748a59b-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.623027 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmj64\" (UniqueName: \"kubernetes.io/projected/69938eab-61e8-4f89-b991-b15e3748a59b-kube-api-access-pmj64\") on node \"crc\" DevicePath \"\"" Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.987120 4835 generic.go:334] "Generic (PLEG): container finished" podID="69938eab-61e8-4f89-b991-b15e3748a59b" containerID="90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84" exitCode=0 Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.987167 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjr47" event={"ID":"69938eab-61e8-4f89-b991-b15e3748a59b","Type":"ContainerDied","Data":"90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84"} Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.987207 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjr47" event={"ID":"69938eab-61e8-4f89-b991-b15e3748a59b","Type":"ContainerDied","Data":"8f3bb0ac05de86311c0ee4ffce1b2f03a0cea12f207844a0841ae12329caca08"} Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.987228 4835 scope.go:117] "RemoveContainer" containerID="90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84" Feb 02 18:06:50 crc kubenswrapper[4835]: I0202 18:06:50.987422 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zjr47" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.009282 4835 scope.go:117] "RemoveContainer" containerID="b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.022477 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zjr47"] Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.031134 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zjr47"] Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.043837 4835 scope.go:117] "RemoveContainer" containerID="17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.091762 4835 scope.go:117] "RemoveContainer" containerID="90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84" Feb 02 18:06:51 crc kubenswrapper[4835]: E0202 18:06:51.092201 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84\": container with ID starting with 90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84 not found: ID does not exist" containerID="90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.092260 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84"} err="failed to get container status \"90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84\": rpc error: code = NotFound desc = could not find container \"90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84\": container with ID starting with 90bea7e63cd070e1ae5f00df4d86fe0ad6f38b24d7032e8833d18cd2bb8b7a84 not found: ID does not exist" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.092293 4835 scope.go:117] "RemoveContainer" containerID="b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb" Feb 02 18:06:51 crc kubenswrapper[4835]: E0202 18:06:51.092527 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb\": container with ID starting with b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb not found: ID does not exist" containerID="b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.092549 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb"} err="failed to get container status \"b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb\": rpc error: code = NotFound desc = could not find container \"b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb\": container with ID starting with b21d3f5a444f783532b69a0e084b464ad5be4598b5fdbf848a075178863671eb not found: ID does not exist" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.092562 4835 scope.go:117] "RemoveContainer" containerID="17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045" Feb 02 18:06:51 crc kubenswrapper[4835]: E0202 18:06:51.092766 4835 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045\": container with ID starting with 17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045 not found: ID does not exist" containerID="17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.092790 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045"} err="failed to get container status \"17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045\": rpc error: code = NotFound desc = could not find container \"17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045\": container with ID starting with 17bcd13ec4a8390ffff88b182af5812f471c08db4d1e231f887d0f01e47a3045 not found: ID does not exist" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.198381 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" path="/var/lib/kubelet/pods/69938eab-61e8-4f89-b991-b15e3748a59b/volumes" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.217938 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-dsndw_0390332b-c0b4-4a28-b815-69ad9d9bed13/kube-rbac-proxy/0.log" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.253064 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-dsndw_0390332b-c0b4-4a28-b815-69ad9d9bed13/controller/0.log" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.365157 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-66lj9_8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e/frr-k8s-webhook-server/0.log" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.494256 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-frr-files/0.log" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.665522 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-metrics/0.log" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.673329 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-reloader/0.log" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.865933 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-reloader/0.log" Feb 02 18:06:51 crc kubenswrapper[4835]: I0202 18:06:51.875412 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-frr-files/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.055110 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-reloader/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.086449 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-metrics/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.097458 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-frr-files/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.099598 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-metrics/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.248392 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-frr-files/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.260267 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-metrics/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.296166 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-reloader/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.306186 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/controller/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.473316 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/frr-metrics/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.512877 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/kube-rbac-proxy/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.560927 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/kube-rbac-proxy-frr/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.656161 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/reloader/0.log" Feb 02 18:06:52 crc kubenswrapper[4835]: I0202 18:06:52.763536 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-b7878cd68-s889h_846128ff-a92d-40b9-835b-3184cb35de48/manager/0.log" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.009824 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5f9664b4df-s2qs9_5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73/webhook-server/0.log" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.111428 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j7rvh"] Feb 02 18:06:53 crc kubenswrapper[4835]: E0202 18:06:53.112110 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" containerName="extract-utilities" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.112122 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" containerName="extract-utilities" Feb 02 18:06:53 crc kubenswrapper[4835]: E0202 18:06:53.112137 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" containerName="extract-content" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.112143 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" containerName="extract-content" Feb 02 18:06:53 crc kubenswrapper[4835]: E0202 18:06:53.112157 4835 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerName="registry-server" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.112163 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerName="registry-server" Feb 02 18:06:53 crc kubenswrapper[4835]: E0202 18:06:53.112181 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerName="extract-utilities" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.112186 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerName="extract-utilities" Feb 02 18:06:53 crc kubenswrapper[4835]: E0202 18:06:53.112205 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" containerName="registry-server" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.112211 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" containerName="registry-server" Feb 02 18:06:53 crc kubenswrapper[4835]: E0202 18:06:53.112226 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerName="extract-content" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.112236 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerName="extract-content" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.112495 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="b935024b-d9f0-4bbf-b6b8-8b17deabc106" containerName="registry-server" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.112515 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="69938eab-61e8-4f89-b991-b15e3748a59b" containerName="registry-server" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.113981 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.123852 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j7rvh"] Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.164811 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-glqbw_9b37cd70-fe2e-406b-a1f2-5aade78f75e4/kube-rbac-proxy/0.log" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.168935 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-catalog-content\") pod \"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.169236 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-utilities\") pod \"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.169474 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwvml\" (UniqueName: \"kubernetes.io/projected/010f2927-4f9c-4a7f-b67f-1e126db228c8-kube-api-access-gwvml\") pod \"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.274437 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-utilities\") pod \"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.274544 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwvml\" (UniqueName: \"kubernetes.io/projected/010f2927-4f9c-4a7f-b67f-1e126db228c8-kube-api-access-gwvml\") pod \"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.274617 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-catalog-content\") pod \"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.276505 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-utilities\") pod \"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.277115 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-catalog-content\") pod 
\"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.312945 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwvml\" (UniqueName: \"kubernetes.io/projected/010f2927-4f9c-4a7f-b67f-1e126db228c8-kube-api-access-gwvml\") pod \"certified-operators-j7rvh\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.462126 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.897931 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-glqbw_9b37cd70-fe2e-406b-a1f2-5aade78f75e4/speaker/0.log" Feb 02 18:06:53 crc kubenswrapper[4835]: I0202 18:06:53.926086 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j7rvh"] Feb 02 18:06:54 crc kubenswrapper[4835]: I0202 18:06:54.054398 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7rvh" event={"ID":"010f2927-4f9c-4a7f-b67f-1e126db228c8","Type":"ContainerStarted","Data":"022c2d5caba3dd034a0ef7f8b86bcec27ab0199e817657ff00a98b20f79ab489"} Feb 02 18:06:54 crc kubenswrapper[4835]: I0202 18:06:54.371336 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/frr/0.log" Feb 02 18:06:55 crc kubenswrapper[4835]: I0202 18:06:55.070722 4835 generic.go:334] "Generic (PLEG): container finished" podID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerID="b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7" exitCode=0 Feb 02 18:06:55 crc kubenswrapper[4835]: I0202 18:06:55.071025 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7rvh" event={"ID":"010f2927-4f9c-4a7f-b67f-1e126db228c8","Type":"ContainerDied","Data":"b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7"} Feb 02 18:06:56 crc kubenswrapper[4835]: I0202 18:06:56.190263 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:06:56 crc kubenswrapper[4835]: E0202 18:06:56.191264 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:06:57 crc kubenswrapper[4835]: I0202 18:06:57.096502 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7rvh" event={"ID":"010f2927-4f9c-4a7f-b67f-1e126db228c8","Type":"ContainerStarted","Data":"8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788"} Feb 02 18:06:58 crc kubenswrapper[4835]: I0202 18:06:58.104599 4835 generic.go:334] "Generic (PLEG): container finished" podID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerID="8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788" exitCode=0 Feb 02 18:06:58 crc kubenswrapper[4835]: I0202 18:06:58.104650 4835 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7rvh" event={"ID":"010f2927-4f9c-4a7f-b67f-1e126db228c8","Type":"ContainerDied","Data":"8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788"} Feb 02 18:06:59 crc kubenswrapper[4835]: I0202 18:06:59.116224 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7rvh" event={"ID":"010f2927-4f9c-4a7f-b67f-1e126db228c8","Type":"ContainerStarted","Data":"2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8"} Feb 02 18:06:59 crc kubenswrapper[4835]: I0202 18:06:59.140251 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j7rvh" podStartSLOduration=2.416907511 podStartE2EDuration="6.140232884s" podCreationTimestamp="2026-02-02 18:06:53 +0000 UTC" firstStartedPulling="2026-02-02 18:06:55.073736952 +0000 UTC m=+4606.695341032" lastFinishedPulling="2026-02-02 18:06:58.797062325 +0000 UTC m=+4610.418666405" observedRunningTime="2026-02-02 18:06:59.131894968 +0000 UTC m=+4610.753499058" watchObservedRunningTime="2026-02-02 18:06:59.140232884 +0000 UTC m=+4610.761836964" Feb 02 18:07:03 crc kubenswrapper[4835]: I0202 18:07:03.463775 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:07:03 crc kubenswrapper[4835]: I0202 18:07:03.464391 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:07:03 crc kubenswrapper[4835]: I0202 18:07:03.513707 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:07:04 crc kubenswrapper[4835]: I0202 18:07:04.204696 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:07:04 crc kubenswrapper[4835]: I0202 18:07:04.252326 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j7rvh"] Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.172246 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-j7rvh" podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerName="registry-server" containerID="cri-o://2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8" gracePeriod=2 Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.681342 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.758542 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwvml\" (UniqueName: \"kubernetes.io/projected/010f2927-4f9c-4a7f-b67f-1e126db228c8-kube-api-access-gwvml\") pod \"010f2927-4f9c-4a7f-b67f-1e126db228c8\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.758631 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-catalog-content\") pod \"010f2927-4f9c-4a7f-b67f-1e126db228c8\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.758895 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-utilities\") pod \"010f2927-4f9c-4a7f-b67f-1e126db228c8\" (UID: \"010f2927-4f9c-4a7f-b67f-1e126db228c8\") " Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.759946 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-utilities" (OuterVolumeSpecName: "utilities") pod "010f2927-4f9c-4a7f-b67f-1e126db228c8" (UID: "010f2927-4f9c-4a7f-b67f-1e126db228c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.767649 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/010f2927-4f9c-4a7f-b67f-1e126db228c8-kube-api-access-gwvml" (OuterVolumeSpecName: "kube-api-access-gwvml") pod "010f2927-4f9c-4a7f-b67f-1e126db228c8" (UID: "010f2927-4f9c-4a7f-b67f-1e126db228c8"). InnerVolumeSpecName "kube-api-access-gwvml". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.861485 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.861523 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwvml\" (UniqueName: \"kubernetes.io/projected/010f2927-4f9c-4a7f-b67f-1e126db228c8-kube-api-access-gwvml\") on node \"crc\" DevicePath \"\"" Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.875157 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "010f2927-4f9c-4a7f-b67f-1e126db228c8" (UID: "010f2927-4f9c-4a7f-b67f-1e126db228c8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:07:06 crc kubenswrapper[4835]: I0202 18:07:06.963878 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/010f2927-4f9c-4a7f-b67f-1e126db228c8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.183829 4835 generic.go:334] "Generic (PLEG): container finished" podID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerID="2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8" exitCode=0 Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.183893 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j7rvh" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.183908 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7rvh" event={"ID":"010f2927-4f9c-4a7f-b67f-1e126db228c8","Type":"ContainerDied","Data":"2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8"} Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.183938 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j7rvh" event={"ID":"010f2927-4f9c-4a7f-b67f-1e126db228c8","Type":"ContainerDied","Data":"022c2d5caba3dd034a0ef7f8b86bcec27ab0199e817657ff00a98b20f79ab489"} Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.183959 4835 scope.go:117] "RemoveContainer" containerID="2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.206154 4835 scope.go:117] "RemoveContainer" containerID="8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.215175 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j7rvh"] Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.253577 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j7rvh"] Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.259505 4835 scope.go:117] "RemoveContainer" containerID="b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.302866 4835 scope.go:117] "RemoveContainer" containerID="2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8" Feb 02 18:07:07 crc kubenswrapper[4835]: E0202 18:07:07.306606 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8\": container with ID starting with 2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8 not found: ID does not exist" containerID="2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.306665 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8"} err="failed to get container status \"2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8\": rpc error: code = NotFound desc = could not find container \"2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8\": container with ID starting with 2f08a832a87d6bd37f6207dbce45b3a6c03fb30350e97bd509063df0416bf1b8 not found: ID does not exist" Feb 02 
18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.306698 4835 scope.go:117] "RemoveContainer" containerID="8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788" Feb 02 18:07:07 crc kubenswrapper[4835]: E0202 18:07:07.308070 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788\": container with ID starting with 8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788 not found: ID does not exist" containerID="8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.308106 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788"} err="failed to get container status \"8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788\": rpc error: code = NotFound desc = could not find container \"8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788\": container with ID starting with 8b6536f73f2abfae1b2d82b97bec02feaa73c74f555314187173e44a70296788 not found: ID does not exist" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.308125 4835 scope.go:117] "RemoveContainer" containerID="b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7" Feb 02 18:07:07 crc kubenswrapper[4835]: E0202 18:07:07.308522 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7\": container with ID starting with b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7 not found: ID does not exist" containerID="b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7" Feb 02 18:07:07 crc kubenswrapper[4835]: I0202 18:07:07.308548 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7"} err="failed to get container status \"b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7\": rpc error: code = NotFound desc = could not find container \"b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7\": container with ID starting with b9e0d034ae2263200f04c788a416aab4353dd3beee9628821e82f83cee28d8a7 not found: ID does not exist" Feb 02 18:07:08 crc kubenswrapper[4835]: I0202 18:07:08.113231 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/util/0.log" Feb 02 18:07:08 crc kubenswrapper[4835]: I0202 18:07:08.425261 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/util/0.log" Feb 02 18:07:08 crc kubenswrapper[4835]: I0202 18:07:08.447673 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/pull/0.log" Feb 02 18:07:08 crc kubenswrapper[4835]: I0202 18:07:08.453542 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/pull/0.log" Feb 02 18:07:08 crc 
kubenswrapper[4835]: I0202 18:07:08.616370 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/util/0.log" Feb 02 18:07:08 crc kubenswrapper[4835]: I0202 18:07:08.630934 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/pull/0.log" Feb 02 18:07:08 crc kubenswrapper[4835]: I0202 18:07:08.660933 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/extract/0.log" Feb 02 18:07:08 crc kubenswrapper[4835]: I0202 18:07:08.820533 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/util/0.log" Feb 02 18:07:08 crc kubenswrapper[4835]: I0202 18:07:08.981111 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/util/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.010087 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/pull/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.035917 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/pull/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.201694 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" path="/var/lib/kubelet/pods/010f2927-4f9c-4a7f-b67f-1e126db228c8/volumes" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.289349 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/pull/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.313623 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/extract/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.327609 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/util/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.677626 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-utilities/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.817831 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-utilities/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.847165 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-content/0.log" Feb 02 18:07:09 crc kubenswrapper[4835]: I0202 18:07:09.854412 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-content/0.log" Feb 02 18:07:10 crc kubenswrapper[4835]: I0202 18:07:10.188655 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:07:10 crc kubenswrapper[4835]: E0202 18:07:10.188907 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:07:10 crc kubenswrapper[4835]: I0202 18:07:10.641166 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-content/0.log" Feb 02 18:07:10 crc kubenswrapper[4835]: I0202 18:07:10.646644 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-utilities/0.log" Feb 02 18:07:10 crc kubenswrapper[4835]: I0202 18:07:10.845107 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-utilities/0.log" Feb 02 18:07:11 crc kubenswrapper[4835]: I0202 18:07:11.091462 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-content/0.log" Feb 02 18:07:11 crc kubenswrapper[4835]: I0202 18:07:11.129301 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-utilities/0.log" Feb 02 18:07:11 crc kubenswrapper[4835]: I0202 18:07:11.130180 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-content/0.log" Feb 02 18:07:11 crc kubenswrapper[4835]: I0202 18:07:11.193073 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/registry-server/0.log" Feb 02 18:07:11 crc kubenswrapper[4835]: I0202 18:07:11.295592 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-utilities/0.log" Feb 02 18:07:11 crc kubenswrapper[4835]: I0202 18:07:11.374706 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-content/0.log" Feb 02 18:07:11 crc kubenswrapper[4835]: I0202 18:07:11.569435 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-lmjmd_624437d3-bcc0-40bc-bc25-d8876722dbc8/marketplace-operator/0.log" Feb 02 18:07:11 crc kubenswrapper[4835]: I0202 18:07:11.784162 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-utilities/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.114044 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/registry-server/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.318009 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-utilities/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.343513 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-content/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.380340 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-content/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.549025 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-utilities/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.576118 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-content/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.697898 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-utilities/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.769303 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/registry-server/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.864161 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-content/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.892783 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-content/0.log" Feb 02 18:07:12 crc kubenswrapper[4835]: I0202 18:07:12.895289 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-utilities/0.log" Feb 02 18:07:13 crc kubenswrapper[4835]: I0202 18:07:13.071703 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-utilities/0.log" Feb 02 18:07:13 crc kubenswrapper[4835]: I0202 18:07:13.086215 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-content/0.log" Feb 02 18:07:13 crc kubenswrapper[4835]: I0202 18:07:13.738669 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/registry-server/0.log" Feb 02 18:07:22 crc kubenswrapper[4835]: I0202 18:07:22.188963 4835 scope.go:117] "RemoveContainer" 
containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:07:22 crc kubenswrapper[4835]: E0202 18:07:22.189728 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:07:34 crc kubenswrapper[4835]: I0202 18:07:34.188937 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:07:34 crc kubenswrapper[4835]: E0202 18:07:34.189581 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:07:47 crc kubenswrapper[4835]: I0202 18:07:47.190035 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:07:47 crc kubenswrapper[4835]: E0202 18:07:47.190677 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:08:02 crc kubenswrapper[4835]: I0202 18:08:02.188471 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:08:02 crc kubenswrapper[4835]: E0202 18:08:02.189183 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:08:13 crc kubenswrapper[4835]: I0202 18:08:13.189582 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:08:13 crc kubenswrapper[4835]: E0202 18:08:13.190343 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:08:26 crc kubenswrapper[4835]: I0202 18:08:26.188959 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:08:26 crc kubenswrapper[4835]: E0202 18:08:26.189777 4835 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:08:38 crc kubenswrapper[4835]: I0202 18:08:38.188817 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:08:38 crc kubenswrapper[4835]: E0202 18:08:38.189552 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:08:49 crc kubenswrapper[4835]: I0202 18:08:49.196031 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:08:49 crc kubenswrapper[4835]: E0202 18:08:49.196912 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:09:04 crc kubenswrapper[4835]: I0202 18:09:04.189225 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:09:04 crc kubenswrapper[4835]: E0202 18:09:04.190027 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:09:18 crc kubenswrapper[4835]: I0202 18:09:18.337996 4835 generic.go:334] "Generic (PLEG): container finished" podID="3a699053-df9a-495a-83bb-7f1612f0e615" containerID="441c57423a12074e541e5032a956e37cee4b3ba6a3e518d28986354b89486de0" exitCode=0 Feb 02 18:09:18 crc kubenswrapper[4835]: I0202 18:09:18.338074 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t87f4/must-gather-x7xcb" event={"ID":"3a699053-df9a-495a-83bb-7f1612f0e615","Type":"ContainerDied","Data":"441c57423a12074e541e5032a956e37cee4b3ba6a3e518d28986354b89486de0"} Feb 02 18:09:18 crc kubenswrapper[4835]: I0202 18:09:18.339102 4835 scope.go:117] "RemoveContainer" containerID="441c57423a12074e541e5032a956e37cee4b3ba6a3e518d28986354b89486de0" Feb 02 18:09:18 crc kubenswrapper[4835]: I0202 18:09:18.474201 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-t87f4_must-gather-x7xcb_3a699053-df9a-495a-83bb-7f1612f0e615/gather/0.log" Feb 02 18:09:19 crc kubenswrapper[4835]: I0202 18:09:19.189762 4835 scope.go:117] "RemoveContainer" 
containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:09:19 crc kubenswrapper[4835]: E0202 18:09:19.190130 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:09:26 crc kubenswrapper[4835]: I0202 18:09:26.987135 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-t87f4/must-gather-x7xcb"] Feb 02 18:09:26 crc kubenswrapper[4835]: I0202 18:09:26.987995 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-t87f4/must-gather-x7xcb" podUID="3a699053-df9a-495a-83bb-7f1612f0e615" containerName="copy" containerID="cri-o://335122e7f22ef4afa318583565477d9694243c921634eba058a35f7673fb4393" gracePeriod=2 Feb 02 18:09:26 crc kubenswrapper[4835]: I0202 18:09:26.997736 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-t87f4/must-gather-x7xcb"] Feb 02 18:09:27 crc kubenswrapper[4835]: I0202 18:09:27.429001 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-t87f4_must-gather-x7xcb_3a699053-df9a-495a-83bb-7f1612f0e615/copy/0.log" Feb 02 18:09:27 crc kubenswrapper[4835]: I0202 18:09:27.429731 4835 generic.go:334] "Generic (PLEG): container finished" podID="3a699053-df9a-495a-83bb-7f1612f0e615" containerID="335122e7f22ef4afa318583565477d9694243c921634eba058a35f7673fb4393" exitCode=143 Feb 02 18:09:27 crc kubenswrapper[4835]: I0202 18:09:27.746068 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-t87f4_must-gather-x7xcb_3a699053-df9a-495a-83bb-7f1612f0e615/copy/0.log" Feb 02 18:09:27 crc kubenswrapper[4835]: I0202 18:09:27.746655 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:09:27 crc kubenswrapper[4835]: I0202 18:09:27.850056 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a699053-df9a-495a-83bb-7f1612f0e615-must-gather-output\") pod \"3a699053-df9a-495a-83bb-7f1612f0e615\" (UID: \"3a699053-df9a-495a-83bb-7f1612f0e615\") " Feb 02 18:09:27 crc kubenswrapper[4835]: I0202 18:09:27.850250 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhdhn\" (UniqueName: \"kubernetes.io/projected/3a699053-df9a-495a-83bb-7f1612f0e615-kube-api-access-vhdhn\") pod \"3a699053-df9a-495a-83bb-7f1612f0e615\" (UID: \"3a699053-df9a-495a-83bb-7f1612f0e615\") " Feb 02 18:09:27 crc kubenswrapper[4835]: I0202 18:09:27.855904 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a699053-df9a-495a-83bb-7f1612f0e615-kube-api-access-vhdhn" (OuterVolumeSpecName: "kube-api-access-vhdhn") pod "3a699053-df9a-495a-83bb-7f1612f0e615" (UID: "3a699053-df9a-495a-83bb-7f1612f0e615"). InnerVolumeSpecName "kube-api-access-vhdhn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:09:27 crc kubenswrapper[4835]: I0202 18:09:27.953058 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhdhn\" (UniqueName: \"kubernetes.io/projected/3a699053-df9a-495a-83bb-7f1612f0e615-kube-api-access-vhdhn\") on node \"crc\" DevicePath \"\"" Feb 02 18:09:28 crc kubenswrapper[4835]: I0202 18:09:28.024663 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a699053-df9a-495a-83bb-7f1612f0e615-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "3a699053-df9a-495a-83bb-7f1612f0e615" (UID: "3a699053-df9a-495a-83bb-7f1612f0e615"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:09:28 crc kubenswrapper[4835]: I0202 18:09:28.054733 4835 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a699053-df9a-495a-83bb-7f1612f0e615-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 02 18:09:28 crc kubenswrapper[4835]: I0202 18:09:28.441539 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-t87f4_must-gather-x7xcb_3a699053-df9a-495a-83bb-7f1612f0e615/copy/0.log" Feb 02 18:09:28 crc kubenswrapper[4835]: I0202 18:09:28.441901 4835 scope.go:117] "RemoveContainer" containerID="335122e7f22ef4afa318583565477d9694243c921634eba058a35f7673fb4393" Feb 02 18:09:28 crc kubenswrapper[4835]: I0202 18:09:28.441925 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t87f4/must-gather-x7xcb" Feb 02 18:09:28 crc kubenswrapper[4835]: I0202 18:09:28.461485 4835 scope.go:117] "RemoveContainer" containerID="441c57423a12074e541e5032a956e37cee4b3ba6a3e518d28986354b89486de0" Feb 02 18:09:29 crc kubenswrapper[4835]: I0202 18:09:29.199920 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a699053-df9a-495a-83bb-7f1612f0e615" path="/var/lib/kubelet/pods/3a699053-df9a-495a-83bb-7f1612f0e615/volumes" Feb 02 18:09:34 crc kubenswrapper[4835]: I0202 18:09:34.190242 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:09:34 crc kubenswrapper[4835]: E0202 18:09:34.190933 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:09:47 crc kubenswrapper[4835]: I0202 18:09:47.188725 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:09:47 crc kubenswrapper[4835]: E0202 18:09:47.189446 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:09:58 crc kubenswrapper[4835]: I0202 18:09:58.189613 4835 scope.go:117] "RemoveContainer" 
containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:09:58 crc kubenswrapper[4835]: E0202 18:09:58.190438 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:10:09 crc kubenswrapper[4835]: I0202 18:10:09.194762 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:10:09 crc kubenswrapper[4835]: E0202 18:10:09.195562 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:10:15 crc kubenswrapper[4835]: I0202 18:10:15.437962 4835 scope.go:117] "RemoveContainer" containerID="2b4f80a547d47e31da1087af82c95eef7b684c86f5e9183e24db3b894571d136" Feb 02 18:10:21 crc kubenswrapper[4835]: I0202 18:10:21.188984 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:10:21 crc kubenswrapper[4835]: E0202 18:10:21.189777 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:10:34 crc kubenswrapper[4835]: I0202 18:10:34.188988 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:10:34 crc kubenswrapper[4835]: E0202 18:10:34.189726 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:10:47 crc kubenswrapper[4835]: I0202 18:10:47.188528 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:10:47 crc kubenswrapper[4835]: E0202 18:10:47.189287 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:11:01 crc kubenswrapper[4835]: I0202 18:11:01.189546 4835 scope.go:117] "RemoveContainer" 
containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:11:01 crc kubenswrapper[4835]: E0202 18:11:01.191294 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:11:15 crc kubenswrapper[4835]: I0202 18:11:15.188521 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:11:16 crc kubenswrapper[4835]: I0202 18:11:16.400952 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"6de61775b6dfa0919621a117267aee27e136e485973852f07aa114fdff41e432"} Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.950397 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-55f57/must-gather-62x9s"] Feb 02 18:12:34 crc kubenswrapper[4835]: E0202 18:12:34.952013 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a699053-df9a-495a-83bb-7f1612f0e615" containerName="gather" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.952029 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a699053-df9a-495a-83bb-7f1612f0e615" containerName="gather" Feb 02 18:12:34 crc kubenswrapper[4835]: E0202 18:12:34.952060 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerName="extract-content" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.952066 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerName="extract-content" Feb 02 18:12:34 crc kubenswrapper[4835]: E0202 18:12:34.952083 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerName="registry-server" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.952090 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerName="registry-server" Feb 02 18:12:34 crc kubenswrapper[4835]: E0202 18:12:34.952106 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a699053-df9a-495a-83bb-7f1612f0e615" containerName="copy" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.952115 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a699053-df9a-495a-83bb-7f1612f0e615" containerName="copy" Feb 02 18:12:34 crc kubenswrapper[4835]: E0202 18:12:34.952132 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerName="extract-utilities" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.952139 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerName="extract-utilities" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.952477 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a699053-df9a-495a-83bb-7f1612f0e615" containerName="copy" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.952491 4835 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="010f2927-4f9c-4a7f-b67f-1e126db228c8" containerName="registry-server" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.952521 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a699053-df9a-495a-83bb-7f1612f0e615" containerName="gather" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.954104 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.958535 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-55f57"/"kube-root-ca.crt" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.959001 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-55f57"/"openshift-service-ca.crt" Feb 02 18:12:34 crc kubenswrapper[4835]: I0202 18:12:34.985357 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-55f57/must-gather-62x9s"] Feb 02 18:12:35 crc kubenswrapper[4835]: I0202 18:12:35.137287 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5xdp\" (UniqueName: \"kubernetes.io/projected/13b4415f-4940-4f96-a40b-51262b480e89-kube-api-access-w5xdp\") pod \"must-gather-62x9s\" (UID: \"13b4415f-4940-4f96-a40b-51262b480e89\") " pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:12:35 crc kubenswrapper[4835]: I0202 18:12:35.137679 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13b4415f-4940-4f96-a40b-51262b480e89-must-gather-output\") pod \"must-gather-62x9s\" (UID: \"13b4415f-4940-4f96-a40b-51262b480e89\") " pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:12:35 crc kubenswrapper[4835]: I0202 18:12:35.240468 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13b4415f-4940-4f96-a40b-51262b480e89-must-gather-output\") pod \"must-gather-62x9s\" (UID: \"13b4415f-4940-4f96-a40b-51262b480e89\") " pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:12:35 crc kubenswrapper[4835]: I0202 18:12:35.240653 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5xdp\" (UniqueName: \"kubernetes.io/projected/13b4415f-4940-4f96-a40b-51262b480e89-kube-api-access-w5xdp\") pod \"must-gather-62x9s\" (UID: \"13b4415f-4940-4f96-a40b-51262b480e89\") " pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:12:35 crc kubenswrapper[4835]: I0202 18:12:35.241039 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13b4415f-4940-4f96-a40b-51262b480e89-must-gather-output\") pod \"must-gather-62x9s\" (UID: \"13b4415f-4940-4f96-a40b-51262b480e89\") " pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:12:35 crc kubenswrapper[4835]: I0202 18:12:35.261537 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5xdp\" (UniqueName: \"kubernetes.io/projected/13b4415f-4940-4f96-a40b-51262b480e89-kube-api-access-w5xdp\") pod \"must-gather-62x9s\" (UID: \"13b4415f-4940-4f96-a40b-51262b480e89\") " pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:12:35 crc kubenswrapper[4835]: I0202 18:12:35.299479 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:12:35 crc kubenswrapper[4835]: I0202 18:12:35.876778 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-55f57/must-gather-62x9s"] Feb 02 18:12:36 crc kubenswrapper[4835]: I0202 18:12:36.077422 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/must-gather-62x9s" event={"ID":"13b4415f-4940-4f96-a40b-51262b480e89","Type":"ContainerStarted","Data":"aa2bec1ff1c07096afc4b55170a7f75a93edb2180162e08bb33cf680eeac3e6b"} Feb 02 18:12:37 crc kubenswrapper[4835]: I0202 18:12:37.087298 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/must-gather-62x9s" event={"ID":"13b4415f-4940-4f96-a40b-51262b480e89","Type":"ContainerStarted","Data":"4d38f1d4e3ed93c7f0455ff9826c8b467dcaae99e4ac9f7074dfc361d7a2351e"} Feb 02 18:12:37 crc kubenswrapper[4835]: I0202 18:12:37.087619 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/must-gather-62x9s" event={"ID":"13b4415f-4940-4f96-a40b-51262b480e89","Type":"ContainerStarted","Data":"c3898ec793c1d1d3afcfe98e0b8d81f5ca30426cba9714d997d6a6ff5a89938a"} Feb 02 18:12:37 crc kubenswrapper[4835]: I0202 18:12:37.108716 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-55f57/must-gather-62x9s" podStartSLOduration=3.108700494 podStartE2EDuration="3.108700494s" podCreationTimestamp="2026-02-02 18:12:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 18:12:37.107423888 +0000 UTC m=+4948.729027968" watchObservedRunningTime="2026-02-02 18:12:37.108700494 +0000 UTC m=+4948.730304564" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.419844 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-55f57/crc-debug-qclvl"] Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.421352 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.422849 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-55f57"/"default-dockercfg-jvctj" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.532806 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtrjh\" (UniqueName: \"kubernetes.io/projected/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-kube-api-access-rtrjh\") pod \"crc-debug-qclvl\" (UID: \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\") " pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.532960 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-host\") pod \"crc-debug-qclvl\" (UID: \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\") " pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.635310 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtrjh\" (UniqueName: \"kubernetes.io/projected/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-kube-api-access-rtrjh\") pod \"crc-debug-qclvl\" (UID: \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\") " pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.635480 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-host\") pod \"crc-debug-qclvl\" (UID: \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\") " pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.635656 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-host\") pod \"crc-debug-qclvl\" (UID: \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\") " pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.655978 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtrjh\" (UniqueName: \"kubernetes.io/projected/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-kube-api-access-rtrjh\") pod \"crc-debug-qclvl\" (UID: \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\") " pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:12:40 crc kubenswrapper[4835]: I0202 18:12:40.742813 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:12:40 crc kubenswrapper[4835]: W0202 18:12:40.779447 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84a3d66b_8047_4eb2_83f0_6ff092cbec0a.slice/crio-b6e8f901cb6b745764ed667e3f10663bd37be434507154b864010f761d83a9d8 WatchSource:0}: Error finding container b6e8f901cb6b745764ed667e3f10663bd37be434507154b864010f761d83a9d8: Status 404 returned error can't find the container with id b6e8f901cb6b745764ed667e3f10663bd37be434507154b864010f761d83a9d8 Feb 02 18:12:41 crc kubenswrapper[4835]: I0202 18:12:41.126918 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/crc-debug-qclvl" event={"ID":"84a3d66b-8047-4eb2-83f0-6ff092cbec0a","Type":"ContainerStarted","Data":"b34c3172e3158902cf16ff6a90590bf4c8e8b7887f608874e7f5f4c1f8d2f8eb"} Feb 02 18:12:41 crc kubenswrapper[4835]: I0202 18:12:41.127346 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/crc-debug-qclvl" event={"ID":"84a3d66b-8047-4eb2-83f0-6ff092cbec0a","Type":"ContainerStarted","Data":"b6e8f901cb6b745764ed667e3f10663bd37be434507154b864010f761d83a9d8"} Feb 02 18:12:41 crc kubenswrapper[4835]: I0202 18:12:41.144951 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-55f57/crc-debug-qclvl" podStartSLOduration=1.144932341 podStartE2EDuration="1.144932341s" podCreationTimestamp="2026-02-02 18:12:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 18:12:41.139866007 +0000 UTC m=+4952.761470107" watchObservedRunningTime="2026-02-02 18:12:41.144932341 +0000 UTC m=+4952.766536421" Feb 02 18:13:25 crc kubenswrapper[4835]: I0202 18:13:25.547050 4835 generic.go:334] "Generic (PLEG): container finished" podID="84a3d66b-8047-4eb2-83f0-6ff092cbec0a" containerID="b34c3172e3158902cf16ff6a90590bf4c8e8b7887f608874e7f5f4c1f8d2f8eb" exitCode=0 Feb 02 18:13:25 crc kubenswrapper[4835]: I0202 18:13:25.547172 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/crc-debug-qclvl" event={"ID":"84a3d66b-8047-4eb2-83f0-6ff092cbec0a","Type":"ContainerDied","Data":"b34c3172e3158902cf16ff6a90590bf4c8e8b7887f608874e7f5f4c1f8d2f8eb"} Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.675939 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.721561 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-55f57/crc-debug-qclvl"] Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.729084 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-55f57/crc-debug-qclvl"] Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.793363 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-host\") pod \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\" (UID: \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\") " Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.793626 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtrjh\" (UniqueName: \"kubernetes.io/projected/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-kube-api-access-rtrjh\") pod \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\" (UID: \"84a3d66b-8047-4eb2-83f0-6ff092cbec0a\") " Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.794078 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-host" (OuterVolumeSpecName: "host") pod "84a3d66b-8047-4eb2-83f0-6ff092cbec0a" (UID: "84a3d66b-8047-4eb2-83f0-6ff092cbec0a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.798929 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-kube-api-access-rtrjh" (OuterVolumeSpecName: "kube-api-access-rtrjh") pod "84a3d66b-8047-4eb2-83f0-6ff092cbec0a" (UID: "84a3d66b-8047-4eb2-83f0-6ff092cbec0a"). InnerVolumeSpecName "kube-api-access-rtrjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.895709 4835 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-host\") on node \"crc\" DevicePath \"\"" Feb 02 18:13:26 crc kubenswrapper[4835]: I0202 18:13:26.895750 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtrjh\" (UniqueName: \"kubernetes.io/projected/84a3d66b-8047-4eb2-83f0-6ff092cbec0a-kube-api-access-rtrjh\") on node \"crc\" DevicePath \"\"" Feb 02 18:13:27 crc kubenswrapper[4835]: I0202 18:13:27.204152 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a3d66b-8047-4eb2-83f0-6ff092cbec0a" path="/var/lib/kubelet/pods/84a3d66b-8047-4eb2-83f0-6ff092cbec0a/volumes" Feb 02 18:13:27 crc kubenswrapper[4835]: I0202 18:13:27.565217 4835 scope.go:117] "RemoveContainer" containerID="b34c3172e3158902cf16ff6a90590bf4c8e8b7887f608874e7f5f4c1f8d2f8eb" Feb 02 18:13:27 crc kubenswrapper[4835]: I0202 18:13:27.565246 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-qclvl" Feb 02 18:13:27 crc kubenswrapper[4835]: I0202 18:13:27.919528 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-55f57/crc-debug-d7jcl"] Feb 02 18:13:27 crc kubenswrapper[4835]: E0202 18:13:27.920204 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a3d66b-8047-4eb2-83f0-6ff092cbec0a" containerName="container-00" Feb 02 18:13:27 crc kubenswrapper[4835]: I0202 18:13:27.920217 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a3d66b-8047-4eb2-83f0-6ff092cbec0a" containerName="container-00" Feb 02 18:13:27 crc kubenswrapper[4835]: I0202 18:13:27.920484 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a3d66b-8047-4eb2-83f0-6ff092cbec0a" containerName="container-00" Feb 02 18:13:27 crc kubenswrapper[4835]: I0202 18:13:27.921096 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:27 crc kubenswrapper[4835]: I0202 18:13:27.923842 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-55f57"/"default-dockercfg-jvctj" Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.018697 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-host\") pod \"crc-debug-d7jcl\" (UID: \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\") " pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.018870 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfbgc\" (UniqueName: \"kubernetes.io/projected/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-kube-api-access-nfbgc\") pod \"crc-debug-d7jcl\" (UID: \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\") " pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.122417 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-host\") pod \"crc-debug-d7jcl\" (UID: \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\") " pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.122582 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfbgc\" (UniqueName: \"kubernetes.io/projected/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-kube-api-access-nfbgc\") pod \"crc-debug-d7jcl\" (UID: \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\") " pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.122514 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-host\") pod \"crc-debug-d7jcl\" (UID: \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\") " pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.146799 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfbgc\" (UniqueName: \"kubernetes.io/projected/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-kube-api-access-nfbgc\") pod \"crc-debug-d7jcl\" (UID: \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\") " pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 
18:13:28.239460 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.577002 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/crc-debug-d7jcl" event={"ID":"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc","Type":"ContainerStarted","Data":"9b262a7058f6bf7957ad93d92efb5345f0de618933963a2faf5de92aab7f73d5"} Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.577347 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/crc-debug-d7jcl" event={"ID":"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc","Type":"ContainerStarted","Data":"5dd15372da6e8ef827a0fdeda6f3bd04a6e060f89727c0ebdb06ce42b62be5d8"} Feb 02 18:13:28 crc kubenswrapper[4835]: I0202 18:13:28.623693 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-55f57/crc-debug-d7jcl" podStartSLOduration=1.623667746 podStartE2EDuration="1.623667746s" podCreationTimestamp="2026-02-02 18:13:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 18:13:28.593287116 +0000 UTC m=+5000.214891196" watchObservedRunningTime="2026-02-02 18:13:28.623667746 +0000 UTC m=+5000.245271826" Feb 02 18:13:29 crc kubenswrapper[4835]: I0202 18:13:29.585909 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/crc-debug-d7jcl" event={"ID":"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc","Type":"ContainerDied","Data":"9b262a7058f6bf7957ad93d92efb5345f0de618933963a2faf5de92aab7f73d5"} Feb 02 18:13:29 crc kubenswrapper[4835]: I0202 18:13:29.585925 4835 generic.go:334] "Generic (PLEG): container finished" podID="a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc" containerID="9b262a7058f6bf7957ad93d92efb5345f0de618933963a2faf5de92aab7f73d5" exitCode=0 Feb 02 18:13:30 crc kubenswrapper[4835]: I0202 18:13:30.740460 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:30 crc kubenswrapper[4835]: I0202 18:13:30.775200 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfbgc\" (UniqueName: \"kubernetes.io/projected/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-kube-api-access-nfbgc\") pod \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\" (UID: \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\") " Feb 02 18:13:30 crc kubenswrapper[4835]: I0202 18:13:30.775420 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-host\") pod \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\" (UID: \"a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc\") " Feb 02 18:13:30 crc kubenswrapper[4835]: I0202 18:13:30.775547 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-host" (OuterVolumeSpecName: "host") pod "a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc" (UID: "a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 18:13:30 crc kubenswrapper[4835]: I0202 18:13:30.776294 4835 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-host\") on node \"crc\" DevicePath \"\"" Feb 02 18:13:30 crc kubenswrapper[4835]: I0202 18:13:30.781240 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-kube-api-access-nfbgc" (OuterVolumeSpecName: "kube-api-access-nfbgc") pod "a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc" (UID: "a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc"). InnerVolumeSpecName "kube-api-access-nfbgc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:13:30 crc kubenswrapper[4835]: I0202 18:13:30.877056 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfbgc\" (UniqueName: \"kubernetes.io/projected/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc-kube-api-access-nfbgc\") on node \"crc\" DevicePath \"\"" Feb 02 18:13:31 crc kubenswrapper[4835]: I0202 18:13:31.542047 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-55f57/crc-debug-d7jcl"] Feb 02 18:13:31 crc kubenswrapper[4835]: I0202 18:13:31.554348 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-55f57/crc-debug-d7jcl"] Feb 02 18:13:31 crc kubenswrapper[4835]: I0202 18:13:31.608805 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5dd15372da6e8ef827a0fdeda6f3bd04a6e060f89727c0ebdb06ce42b62be5d8" Feb 02 18:13:31 crc kubenswrapper[4835]: I0202 18:13:31.608872 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-d7jcl" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.741784 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-55f57/crc-debug-24mmp"] Feb 02 18:13:32 crc kubenswrapper[4835]: E0202 18:13:32.743543 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc" containerName="container-00" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.743566 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc" containerName="container-00" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.743768 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc" containerName="container-00" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.744726 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.747851 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-55f57"/"default-dockercfg-jvctj" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.817710 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7v2b\" (UniqueName: \"kubernetes.io/projected/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-kube-api-access-j7v2b\") pod \"crc-debug-24mmp\" (UID: \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\") " pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.818043 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-host\") pod \"crc-debug-24mmp\" (UID: \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\") " pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.919926 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7v2b\" (UniqueName: \"kubernetes.io/projected/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-kube-api-access-j7v2b\") pod \"crc-debug-24mmp\" (UID: \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\") " pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.920052 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-host\") pod \"crc-debug-24mmp\" (UID: \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\") " pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.920212 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-host\") pod \"crc-debug-24mmp\" (UID: \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\") " pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:32 crc kubenswrapper[4835]: I0202 18:13:32.949618 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7v2b\" (UniqueName: \"kubernetes.io/projected/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-kube-api-access-j7v2b\") pod \"crc-debug-24mmp\" (UID: \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\") " pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:33 crc kubenswrapper[4835]: I0202 18:13:33.069158 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:33 crc kubenswrapper[4835]: W0202 18:13:33.120612 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6690cb1_678b_4ff3_913c_bc61aed9ef7d.slice/crio-34956ed689450dcb63f64974bfb4cc0d3cd5a7261134ea1b49083fb25b7aa92d WatchSource:0}: Error finding container 34956ed689450dcb63f64974bfb4cc0d3cd5a7261134ea1b49083fb25b7aa92d: Status 404 returned error can't find the container with id 34956ed689450dcb63f64974bfb4cc0d3cd5a7261134ea1b49083fb25b7aa92d Feb 02 18:13:33 crc kubenswrapper[4835]: I0202 18:13:33.200739 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc" path="/var/lib/kubelet/pods/a5eb31c2-4ee2-4e21-a6e9-9c60b995f5bc/volumes" Feb 02 18:13:33 crc kubenswrapper[4835]: I0202 18:13:33.645988 4835 generic.go:334] "Generic (PLEG): container finished" podID="a6690cb1-678b-4ff3-913c-bc61aed9ef7d" containerID="c276c0add7054ce1c52e95036c166b02e4010e562ec48c02e63f2e8fbcab16a9" exitCode=0 Feb 02 18:13:33 crc kubenswrapper[4835]: I0202 18:13:33.646097 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/crc-debug-24mmp" event={"ID":"a6690cb1-678b-4ff3-913c-bc61aed9ef7d","Type":"ContainerDied","Data":"c276c0add7054ce1c52e95036c166b02e4010e562ec48c02e63f2e8fbcab16a9"} Feb 02 18:13:33 crc kubenswrapper[4835]: I0202 18:13:33.646262 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/crc-debug-24mmp" event={"ID":"a6690cb1-678b-4ff3-913c-bc61aed9ef7d","Type":"ContainerStarted","Data":"34956ed689450dcb63f64974bfb4cc0d3cd5a7261134ea1b49083fb25b7aa92d"} Feb 02 18:13:33 crc kubenswrapper[4835]: I0202 18:13:33.690879 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-55f57/crc-debug-24mmp"] Feb 02 18:13:33 crc kubenswrapper[4835]: I0202 18:13:33.698678 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-55f57/crc-debug-24mmp"] Feb 02 18:13:34 crc kubenswrapper[4835]: I0202 18:13:34.773234 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:34 crc kubenswrapper[4835]: I0202 18:13:34.856705 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7v2b\" (UniqueName: \"kubernetes.io/projected/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-kube-api-access-j7v2b\") pod \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\" (UID: \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\") " Feb 02 18:13:34 crc kubenswrapper[4835]: I0202 18:13:34.856753 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-host\") pod \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\" (UID: \"a6690cb1-678b-4ff3-913c-bc61aed9ef7d\") " Feb 02 18:13:34 crc kubenswrapper[4835]: I0202 18:13:34.857188 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-host" (OuterVolumeSpecName: "host") pod "a6690cb1-678b-4ff3-913c-bc61aed9ef7d" (UID: "a6690cb1-678b-4ff3-913c-bc61aed9ef7d"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 18:13:34 crc kubenswrapper[4835]: I0202 18:13:34.863676 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-kube-api-access-j7v2b" (OuterVolumeSpecName: "kube-api-access-j7v2b") pod "a6690cb1-678b-4ff3-913c-bc61aed9ef7d" (UID: "a6690cb1-678b-4ff3-913c-bc61aed9ef7d"). InnerVolumeSpecName "kube-api-access-j7v2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:13:34 crc kubenswrapper[4835]: I0202 18:13:34.958990 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7v2b\" (UniqueName: \"kubernetes.io/projected/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-kube-api-access-j7v2b\") on node \"crc\" DevicePath \"\"" Feb 02 18:13:34 crc kubenswrapper[4835]: I0202 18:13:34.959026 4835 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6690cb1-678b-4ff3-913c-bc61aed9ef7d-host\") on node \"crc\" DevicePath \"\"" Feb 02 18:13:35 crc kubenswrapper[4835]: I0202 18:13:35.201142 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6690cb1-678b-4ff3-913c-bc61aed9ef7d" path="/var/lib/kubelet/pods/a6690cb1-678b-4ff3-913c-bc61aed9ef7d/volumes" Feb 02 18:13:35 crc kubenswrapper[4835]: I0202 18:13:35.672084 4835 scope.go:117] "RemoveContainer" containerID="c276c0add7054ce1c52e95036c166b02e4010e562ec48c02e63f2e8fbcab16a9" Feb 02 18:13:35 crc kubenswrapper[4835]: I0202 18:13:35.672118 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-55f57/crc-debug-24mmp" Feb 02 18:13:44 crc kubenswrapper[4835]: I0202 18:13:44.870361 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:13:44 crc kubenswrapper[4835]: I0202 18:13:44.870923 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:14:14 crc kubenswrapper[4835]: I0202 18:14:14.870713 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:14:14 crc kubenswrapper[4835]: I0202 18:14:14.872399 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:14:22 crc kubenswrapper[4835]: I0202 18:14:22.105042 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-788b5b9b58-9wmkc_7466e48d-b9d4-4a34-917c-5ddd649eaac9/barbican-api/0.log" Feb 02 18:14:22 crc kubenswrapper[4835]: I0202 18:14:22.348185 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-keystone-listener-6f74b59756-mvv58_34fd3f27-2fa7-4a00-8389-97ac4ce31e33/barbican-keystone-listener/0.log" Feb 02 18:14:22 crc kubenswrapper[4835]: I0202 18:14:22.400877 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-788b5b9b58-9wmkc_7466e48d-b9d4-4a34-917c-5ddd649eaac9/barbican-api-log/0.log" Feb 02 18:14:22 crc kubenswrapper[4835]: I0202 18:14:22.581622 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6f74b59756-mvv58_34fd3f27-2fa7-4a00-8389-97ac4ce31e33/barbican-keystone-listener-log/0.log" Feb 02 18:14:22 crc kubenswrapper[4835]: I0202 18:14:22.615611 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5c47cddbff-wsm2t_79900da5-f2b8-4e39-8a30-feefcfec5a04/barbican-worker/0.log" Feb 02 18:14:22 crc kubenswrapper[4835]: I0202 18:14:22.658169 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5c47cddbff-wsm2t_79900da5-f2b8-4e39-8a30-feefcfec5a04/barbican-worker-log/0.log" Feb 02 18:14:22 crc kubenswrapper[4835]: I0202 18:14:22.831444 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-j27cw_29d9c1e8-035d-485c-bbfa-2c0328468c6a/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:22 crc kubenswrapper[4835]: I0202 18:14:22.895079 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_05826cd2-708f-4ce4-bbfb-04a0e6206c12/ceilometer-central-agent/0.log" Feb 02 18:14:23 crc kubenswrapper[4835]: I0202 18:14:23.021605 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_05826cd2-708f-4ce4-bbfb-04a0e6206c12/ceilometer-notification-agent/0.log" Feb 02 18:14:23 crc kubenswrapper[4835]: I0202 18:14:23.035507 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_05826cd2-708f-4ce4-bbfb-04a0e6206c12/proxy-httpd/0.log" Feb 02 18:14:23 crc kubenswrapper[4835]: I0202 18:14:23.136620 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_05826cd2-708f-4ce4-bbfb-04a0e6206c12/sg-core/0.log" Feb 02 18:14:23 crc kubenswrapper[4835]: I0202 18:14:23.219295 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-srhj7_7703d310-723f-40a8-bae2-d11570ea275b/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:23 crc kubenswrapper[4835]: I0202 18:14:23.362477 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-r6fs8_644fa065-6ba2-4813-84c8-c8f3d8da2971/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:23 crc kubenswrapper[4835]: I0202 18:14:23.932668 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_ef67b40a-7472-4011-95ad-4713b23bf160/probe/0.log" Feb 02 18:14:24 crc kubenswrapper[4835]: I0202 18:14:24.104032 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_99971416-88df-48dd-9e3a-91874214a8b6/cinder-api/0.log" Feb 02 18:14:24 crc kubenswrapper[4835]: I0202 18:14:24.120910 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_99971416-88df-48dd-9e3a-91874214a8b6/cinder-api-log/0.log" Feb 02 18:14:24 crc kubenswrapper[4835]: I0202 18:14:24.423431 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-scheduler-0_9c6fe27c-e17a-4f0f-bc50-21b8d1b49081/cinder-scheduler/0.log" Feb 02 18:14:24 crc kubenswrapper[4835]: I0202 18:14:24.457530 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9c6fe27c-e17a-4f0f-bc50-21b8d1b49081/probe/0.log" Feb 02 18:14:24 crc kubenswrapper[4835]: I0202 18:14:24.727232 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_4b1fb0f8-db78-42d9-82e2-c0dcda0cd231/probe/0.log" Feb 02 18:14:24 crc kubenswrapper[4835]: I0202 18:14:24.968429 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-7shcq_87617dd5-12a8-49cc-867a-aa0f2d0db447/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:25 crc kubenswrapper[4835]: I0202 18:14:25.219001 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rsr7p_1125f088-790d-4b32-831f-970cba6dc015/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:25 crc kubenswrapper[4835]: I0202 18:14:25.430208 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-zddfv_12c4e956-4456-4f8e-b802-1db95f550d51/init/0.log" Feb 02 18:14:25 crc kubenswrapper[4835]: I0202 18:14:25.580362 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_ef67b40a-7472-4011-95ad-4713b23bf160/cinder-backup/0.log" Feb 02 18:14:25 crc kubenswrapper[4835]: I0202 18:14:25.784186 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-zddfv_12c4e956-4456-4f8e-b802-1db95f550d51/init/0.log" Feb 02 18:14:25 crc kubenswrapper[4835]: I0202 18:14:25.822451 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-zddfv_12c4e956-4456-4f8e-b802-1db95f550d51/dnsmasq-dns/0.log" Feb 02 18:14:26 crc kubenswrapper[4835]: I0202 18:14:26.018178 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_0454a882-7982-44f7-8f83-3be157de886a/glance-httpd/0.log" Feb 02 18:14:26 crc kubenswrapper[4835]: I0202 18:14:26.021433 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_0454a882-7982-44f7-8f83-3be157de886a/glance-log/0.log" Feb 02 18:14:26 crc kubenswrapper[4835]: I0202 18:14:26.201203 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_7f0c5f67-a208-4b73-9f8b-c924d61cdf9e/glance-httpd/0.log" Feb 02 18:14:26 crc kubenswrapper[4835]: I0202 18:14:26.245433 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_7f0c5f67-a208-4b73-9f8b-c924d61cdf9e/glance-log/0.log" Feb 02 18:14:26 crc kubenswrapper[4835]: I0202 18:14:26.458306 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f575cdbb6-2fppg_fec30fb3-23dc-4443-a90f-4fb8defb3a1f/horizon/0.log" Feb 02 18:14:26 crc kubenswrapper[4835]: I0202 18:14:26.559889 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-h9gsz_58ba2cea-000b-458c-bb8f-c3f693512a30/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:26 crc kubenswrapper[4835]: I0202 18:14:26.769980 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-4zj54_c814521e-9a8e-41bd-8eb9-05990dbe267f/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:26 crc kubenswrapper[4835]: I0202 18:14:26.885632 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f575cdbb6-2fppg_fec30fb3-23dc-4443-a90f-4fb8defb3a1f/horizon-log/0.log" Feb 02 18:14:27 crc kubenswrapper[4835]: I0202 18:14:27.048899 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29500921-22pwk_9d457835-0e10-405d-af73-9ef35d8f24b4/keystone-cron/0.log" Feb 02 18:14:27 crc kubenswrapper[4835]: I0202 18:14:27.185221 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_dc0384ad-df86-4939-8c71-92aff217a691/kube-state-metrics/0.log" Feb 02 18:14:27 crc kubenswrapper[4835]: I0202 18:14:27.708724 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-pbdb7_8ebc7011-6fd1-437b-90dc-38f23dc004f5/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:27 crc kubenswrapper[4835]: I0202 18:14:27.783456 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-577d94f4db-mdlkk_0a44fa56-f689-4268-9973-867224dc13ef/keystone-api/0.log" Feb 02 18:14:27 crc kubenswrapper[4835]: I0202 18:14:27.922895 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_3a4997a1-3860-46d1-ba9f-a81c6800aec9/manila-api/0.log" Feb 02 18:14:28 crc kubenswrapper[4835]: I0202 18:14:28.262492 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_3a4997a1-3860-46d1-ba9f-a81c6800aec9/manila-api-log/0.log" Feb 02 18:14:28 crc kubenswrapper[4835]: I0202 18:14:28.279629 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_4f49a042-6b94-4a36-8607-1eb164147d96/probe/0.log" Feb 02 18:14:28 crc kubenswrapper[4835]: I0202 18:14:28.460193 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_4f49a042-6b94-4a36-8607-1eb164147d96/manila-scheduler/0.log" Feb 02 18:14:28 crc kubenswrapper[4835]: I0202 18:14:28.572545 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_eb3ede9c-1564-450a-b0c5-034c5ff8d285/manila-share/0.log" Feb 02 18:14:28 crc kubenswrapper[4835]: I0202 18:14:28.587463 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_eb3ede9c-1564-450a-b0c5-034c5ff8d285/probe/0.log" Feb 02 18:14:29 crc kubenswrapper[4835]: I0202 18:14:29.137585 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-66f46bdd4f-5p4b9_91d3abaa-c52b-495d-b400-8d7ad6ad28e9/neutron-api/0.log" Feb 02 18:14:29 crc kubenswrapper[4835]: I0202 18:14:29.164531 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-66f46bdd4f-5p4b9_91d3abaa-c52b-495d-b400-8d7ad6ad28e9/neutron-httpd/0.log" Feb 02 18:14:29 crc kubenswrapper[4835]: I0202 18:14:29.318457 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-t9qxx_cb4a4f3f-7bb3-498f-b54c-bf0471877ff5/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:30 crc kubenswrapper[4835]: I0202 18:14:30.721866 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_25c6e2e4-4f57-49a6-a558-92106e3f4856/nova-api-log/0.log" Feb 02 18:14:30 crc 
kubenswrapper[4835]: I0202 18:14:30.922214 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_fb71347d-bf06-4685-809c-a20715adc072/nova-cell0-conductor-conductor/0.log" Feb 02 18:14:31 crc kubenswrapper[4835]: I0202 18:14:31.355594 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_7572ca57-a7e7-4025-8688-de2e52ece174/nova-cell1-conductor-conductor/0.log" Feb 02 18:14:31 crc kubenswrapper[4835]: I0202 18:14:31.460010 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_25c6e2e4-4f57-49a6-a558-92106e3f4856/nova-api-api/0.log" Feb 02 18:14:31 crc kubenswrapper[4835]: I0202 18:14:31.550547 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_dde1c61e-1816-44bc-b1bc-9e1545987087/nova-cell1-novncproxy-novncproxy/0.log" Feb 02 18:14:31 crc kubenswrapper[4835]: I0202 18:14:31.735329 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tmhb7_224a86ad-9920-4e35-8470-e48d3af63934/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:31 crc kubenswrapper[4835]: I0202 18:14:31.931473 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b112a741-ef20-4e18-a161-01ed24d9b5da/nova-metadata-log/0.log" Feb 02 18:14:32 crc kubenswrapper[4835]: I0202 18:14:32.461024 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_a4594b86-bb25-4c6b-922e-ecc018bf4081/nova-scheduler-scheduler/0.log" Feb 02 18:14:32 crc kubenswrapper[4835]: I0202 18:14:32.521560 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b630cc52-70e1-4580-8d73-df2507194554/mysql-bootstrap/0.log" Feb 02 18:14:32 crc kubenswrapper[4835]: I0202 18:14:32.691212 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b630cc52-70e1-4580-8d73-df2507194554/mysql-bootstrap/0.log" Feb 02 18:14:32 crc kubenswrapper[4835]: I0202 18:14:32.729960 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b630cc52-70e1-4580-8d73-df2507194554/galera/0.log" Feb 02 18:14:32 crc kubenswrapper[4835]: I0202 18:14:32.960808 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_eec68dd7-cf6a-45a4-a036-19bcf050c892/mysql-bootstrap/0.log" Feb 02 18:14:33 crc kubenswrapper[4835]: I0202 18:14:33.140316 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_eec68dd7-cf6a-45a4-a036-19bcf050c892/mysql-bootstrap/0.log" Feb 02 18:14:33 crc kubenswrapper[4835]: I0202 18:14:33.186425 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_eec68dd7-cf6a-45a4-a036-19bcf050c892/galera/0.log" Feb 02 18:14:33 crc kubenswrapper[4835]: I0202 18:14:33.415702 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a73ab577-2970-4e91-bbde-344bd924ba2c/openstackclient/0.log" Feb 02 18:14:33 crc kubenswrapper[4835]: I0202 18:14:33.610363 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_4b1fb0f8-db78-42d9-82e2-c0dcda0cd231/cinder-volume/0.log" Feb 02 18:14:33 crc kubenswrapper[4835]: I0202 18:14:33.628012 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-hxh6p_e3608c64-7b50-4a57-a0ea-578164629872/ovn-controller/0.log" Feb 02 18:14:33 crc kubenswrapper[4835]: I0202 18:14:33.852907 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-c6jjz_9ce87c37-0b7a-4a7a-b90f-f34aaa078035/openstack-network-exporter/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.078628 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-248s6_89395ae4-5378-4709-a8b2-5b412e709142/ovsdb-server-init/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.254630 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b112a741-ef20-4e18-a161-01ed24d9b5da/nova-metadata-metadata/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.270986 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-248s6_89395ae4-5378-4709-a8b2-5b412e709142/ovs-vswitchd/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.297443 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-248s6_89395ae4-5378-4709-a8b2-5b412e709142/ovsdb-server-init/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.331584 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-248s6_89395ae4-5378-4709-a8b2-5b412e709142/ovsdb-server/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.500523 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-5f2k5_51a0827f-7d93-4cd6-b0e3-aa16bdb6dbf2/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.522422 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a9b8ae61-599b-4f97-84a8-6af5a6e37e52/openstack-network-exporter/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.853106 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_4d2f7d44-7579-4cd7-867c-77a46a7296cc/ovsdbserver-nb/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.858146 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a9b8ae61-599b-4f97-84a8-6af5a6e37e52/ovn-northd/0.log" Feb 02 18:14:34 crc kubenswrapper[4835]: I0202 18:14:34.883888 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_4d2f7d44-7579-4cd7-867c-77a46a7296cc/openstack-network-exporter/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.047318 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_03234de4-e1af-4911-93b4-6da716177367/openstack-network-exporter/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.156044 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_03234de4-e1af-4911-93b4-6da716177367/ovsdbserver-sb/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.407498 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7014da0-d4d6-4279-9f39-e50a4bbcdda5/setup-container/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.425168 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b8f544fd4-zp9bk_b37e6604-22e9-4e3d-8b9e-27ac0fccad12/placement-api/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.445906 4835 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_placement-b8f544fd4-zp9bk_b37e6604-22e9-4e3d-8b9e-27ac0fccad12/placement-log/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.674346 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7014da0-d4d6-4279-9f39-e50a4bbcdda5/setup-container/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.699121 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450/setup-container/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.784366 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e7014da0-d4d6-4279-9f39-e50a4bbcdda5/rabbitmq/0.log" Feb 02 18:14:35 crc kubenswrapper[4835]: I0202 18:14:35.930221 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450/rabbitmq/0.log" Feb 02 18:14:36 crc kubenswrapper[4835]: I0202 18:14:36.009710 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_cf1f6e5c-fe3f-4be0-9931-dd3aa5a61450/setup-container/0.log" Feb 02 18:14:36 crc kubenswrapper[4835]: I0202 18:14:36.035765 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-7jvfj_7f86d2bc-c7cf-42c8-b62a-828961f9e880/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:36 crc kubenswrapper[4835]: I0202 18:14:36.182106 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-s56k6_fbf199f3-f350-4171-ad1a-0eb83e623e22/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:36 crc kubenswrapper[4835]: I0202 18:14:36.328583 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-5227h_c6aee2d4-013e-4ac6-a7f0-f5f640c724ed/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:36 crc kubenswrapper[4835]: I0202 18:14:36.418177 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-jm7mm_ad7d4ee0-52f4-4d22-a285-d1ca6d76ccd2/ssh-known-hosts-edpm-deployment/0.log" Feb 02 18:14:36 crc kubenswrapper[4835]: I0202 18:14:36.624886 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_0cfd7d28-c17f-4035-bd42-89b10e3c60eb/tempest-tests-tempest-tests-runner/0.log" Feb 02 18:14:36 crc kubenswrapper[4835]: I0202 18:14:36.728302 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_538dadbd-5539-459e-9939-f078b6bdda38/test-operator-logs-container/0.log" Feb 02 18:14:36 crc kubenswrapper[4835]: I0202 18:14:36.875951 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-s6cjm_ae2d5259-fd96-4127-8bf9-ddba82deadf6/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 18:14:44 crc kubenswrapper[4835]: I0202 18:14:44.871416 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:14:44 crc kubenswrapper[4835]: I0202 18:14:44.872067 4835 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:14:44 crc kubenswrapper[4835]: I0202 18:14:44.872134 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 18:14:44 crc kubenswrapper[4835]: I0202 18:14:44.873368 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6de61775b6dfa0919621a117267aee27e136e485973852f07aa114fdff41e432"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 18:14:44 crc kubenswrapper[4835]: I0202 18:14:44.873445 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://6de61775b6dfa0919621a117267aee27e136e485973852f07aa114fdff41e432" gracePeriod=600 Feb 02 18:14:45 crc kubenswrapper[4835]: I0202 18:14:45.328534 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="6de61775b6dfa0919621a117267aee27e136e485973852f07aa114fdff41e432" exitCode=0 Feb 02 18:14:45 crc kubenswrapper[4835]: I0202 18:14:45.328790 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"6de61775b6dfa0919621a117267aee27e136e485973852f07aa114fdff41e432"} Feb 02 18:14:45 crc kubenswrapper[4835]: I0202 18:14:45.328826 4835 scope.go:117] "RemoveContainer" containerID="13ff247159f82f1e65727404ecad35a0569b4966470985c3ce960a005e3a90bf" Feb 02 18:14:45 crc kubenswrapper[4835]: I0202 18:14:45.748317 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_a5b7fac0-4bb3-4138-9618-96bf25cbdde5/memcached/0.log" Feb 02 18:14:46 crc kubenswrapper[4835]: I0202 18:14:46.339863 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad"} Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.523741 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lhdsm"] Feb 02 18:14:51 crc kubenswrapper[4835]: E0202 18:14:51.525842 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6690cb1-678b-4ff3-913c-bc61aed9ef7d" containerName="container-00" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.525946 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6690cb1-678b-4ff3-913c-bc61aed9ef7d" containerName="container-00" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.526198 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6690cb1-678b-4ff3-913c-bc61aed9ef7d" containerName="container-00" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.527627 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.538635 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lhdsm"] Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.588340 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-catalog-content\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.588674 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxvqs\" (UniqueName: \"kubernetes.io/projected/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-kube-api-access-fxvqs\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.588808 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-utilities\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.690409 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-utilities\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.690561 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-catalog-content\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.690595 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxvqs\" (UniqueName: \"kubernetes.io/projected/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-kube-api-access-fxvqs\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.691546 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-utilities\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.691605 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-catalog-content\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.721916 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fxvqs\" (UniqueName: \"kubernetes.io/projected/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-kube-api-access-fxvqs\") pod \"redhat-operators-lhdsm\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:51 crc kubenswrapper[4835]: I0202 18:14:51.849001 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:14:52 crc kubenswrapper[4835]: I0202 18:14:52.397491 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lhdsm"] Feb 02 18:14:53 crc kubenswrapper[4835]: I0202 18:14:53.404514 4835 generic.go:334] "Generic (PLEG): container finished" podID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerID="2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568" exitCode=0 Feb 02 18:14:53 crc kubenswrapper[4835]: I0202 18:14:53.404690 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdsm" event={"ID":"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf","Type":"ContainerDied","Data":"2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568"} Feb 02 18:14:53 crc kubenswrapper[4835]: I0202 18:14:53.404854 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdsm" event={"ID":"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf","Type":"ContainerStarted","Data":"437aab51cf856631b1fcbd7889242ef76fe6a375c65f5b4540501dec7f42c248"} Feb 02 18:14:53 crc kubenswrapper[4835]: I0202 18:14:53.406511 4835 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 18:14:54 crc kubenswrapper[4835]: I0202 18:14:54.419581 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdsm" event={"ID":"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf","Type":"ContainerStarted","Data":"ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad"} Feb 02 18:14:58 crc kubenswrapper[4835]: I0202 18:14:58.463255 4835 generic.go:334] "Generic (PLEG): container finished" podID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerID="ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad" exitCode=0 Feb 02 18:14:58 crc kubenswrapper[4835]: I0202 18:14:58.463339 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdsm" event={"ID":"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf","Type":"ContainerDied","Data":"ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad"} Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.152914 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc"] Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.154963 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.157032 4835 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.158041 4835 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.164517 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc"] Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.292068 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh9tx\" (UniqueName: \"kubernetes.io/projected/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-kube-api-access-gh9tx\") pod \"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.292148 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-secret-volume\") pod \"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.292339 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-config-volume\") pod \"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.394433 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-config-volume\") pod \"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.394556 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh9tx\" (UniqueName: \"kubernetes.io/projected/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-kube-api-access-gh9tx\") pod \"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.394604 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-secret-volume\") pod \"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.395566 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-config-volume\") pod 
\"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.402309 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-secret-volume\") pod \"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.413941 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh9tx\" (UniqueName: \"kubernetes.io/projected/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-kube-api-access-gh9tx\") pod \"collect-profiles-29500935-d2hgc\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.479472 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.490325 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdsm" event={"ID":"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf","Type":"ContainerStarted","Data":"907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a"} Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.531074 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lhdsm" podStartSLOduration=4.052171341 podStartE2EDuration="9.531045826s" podCreationTimestamp="2026-02-02 18:14:51 +0000 UTC" firstStartedPulling="2026-02-02 18:14:53.406167202 +0000 UTC m=+5085.027771282" lastFinishedPulling="2026-02-02 18:14:58.885041687 +0000 UTC m=+5090.506645767" observedRunningTime="2026-02-02 18:15:00.516656219 +0000 UTC m=+5092.138260319" watchObservedRunningTime="2026-02-02 18:15:00.531045826 +0000 UTC m=+5092.152649906" Feb 02 18:15:00 crc kubenswrapper[4835]: I0202 18:15:00.987502 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc"] Feb 02 18:15:01 crc kubenswrapper[4835]: W0202 18:15:01.002140 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode05d608a_9d3c_48c6_8aa7_8944bea12ff7.slice/crio-72511bbdddcd25addb2b1713fcdf7804c3b8f214bbb39c75610478349fdf8f5f WatchSource:0}: Error finding container 72511bbdddcd25addb2b1713fcdf7804c3b8f214bbb39c75610478349fdf8f5f: Status 404 returned error can't find the container with id 72511bbdddcd25addb2b1713fcdf7804c3b8f214bbb39c75610478349fdf8f5f Feb 02 18:15:01 crc kubenswrapper[4835]: I0202 18:15:01.508625 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" event={"ID":"e05d608a-9d3c-48c6-8aa7-8944bea12ff7","Type":"ContainerStarted","Data":"748670a69e842fc04220ee4d93013865a5e33cbc400f1fff6c004de171024974"} Feb 02 18:15:01 crc kubenswrapper[4835]: I0202 18:15:01.509075 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" 
event={"ID":"e05d608a-9d3c-48c6-8aa7-8944bea12ff7","Type":"ContainerStarted","Data":"72511bbdddcd25addb2b1713fcdf7804c3b8f214bbb39c75610478349fdf8f5f"} Feb 02 18:15:01 crc kubenswrapper[4835]: I0202 18:15:01.535567 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" podStartSLOduration=1.535543995 podStartE2EDuration="1.535543995s" podCreationTimestamp="2026-02-02 18:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 18:15:01.527308202 +0000 UTC m=+5093.148912282" watchObservedRunningTime="2026-02-02 18:15:01.535543995 +0000 UTC m=+5093.157148075" Feb 02 18:15:01 crc kubenswrapper[4835]: I0202 18:15:01.851479 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:15:01 crc kubenswrapper[4835]: I0202 18:15:01.852570 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:15:02 crc kubenswrapper[4835]: I0202 18:15:02.520410 4835 generic.go:334] "Generic (PLEG): container finished" podID="e05d608a-9d3c-48c6-8aa7-8944bea12ff7" containerID="748670a69e842fc04220ee4d93013865a5e33cbc400f1fff6c004de171024974" exitCode=0 Feb 02 18:15:02 crc kubenswrapper[4835]: I0202 18:15:02.520520 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" event={"ID":"e05d608a-9d3c-48c6-8aa7-8944bea12ff7","Type":"ContainerDied","Data":"748670a69e842fc04220ee4d93013865a5e33cbc400f1fff6c004de171024974"} Feb 02 18:15:02 crc kubenswrapper[4835]: I0202 18:15:02.908020 4835 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lhdsm" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="registry-server" probeResult="failure" output=< Feb 02 18:15:02 crc kubenswrapper[4835]: timeout: failed to connect service ":50051" within 1s Feb 02 18:15:02 crc kubenswrapper[4835]: > Feb 02 18:15:03 crc kubenswrapper[4835]: I0202 18:15:03.935689 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.081761 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-secret-volume\") pod \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.081828 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh9tx\" (UniqueName: \"kubernetes.io/projected/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-kube-api-access-gh9tx\") pod \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.081884 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-config-volume\") pod \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\" (UID: \"e05d608a-9d3c-48c6-8aa7-8944bea12ff7\") " Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.083068 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-config-volume" (OuterVolumeSpecName: "config-volume") pod "e05d608a-9d3c-48c6-8aa7-8944bea12ff7" (UID: "e05d608a-9d3c-48c6-8aa7-8944bea12ff7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.087678 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e05d608a-9d3c-48c6-8aa7-8944bea12ff7" (UID: "e05d608a-9d3c-48c6-8aa7-8944bea12ff7"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.088198 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-kube-api-access-gh9tx" (OuterVolumeSpecName: "kube-api-access-gh9tx") pod "e05d608a-9d3c-48c6-8aa7-8944bea12ff7" (UID: "e05d608a-9d3c-48c6-8aa7-8944bea12ff7"). InnerVolumeSpecName "kube-api-access-gh9tx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.184633 4835 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.184666 4835 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.184677 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh9tx\" (UniqueName: \"kubernetes.io/projected/e05d608a-9d3c-48c6-8aa7-8944bea12ff7-kube-api-access-gh9tx\") on node \"crc\" DevicePath \"\"" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.538438 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" event={"ID":"e05d608a-9d3c-48c6-8aa7-8944bea12ff7","Type":"ContainerDied","Data":"72511bbdddcd25addb2b1713fcdf7804c3b8f214bbb39c75610478349fdf8f5f"} Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.538483 4835 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72511bbdddcd25addb2b1713fcdf7804c3b8f214bbb39c75610478349fdf8f5f" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.538543 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500935-d2hgc" Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.613658 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr"] Feb 02 18:15:04 crc kubenswrapper[4835]: I0202 18:15:04.622670 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500890-wzxfr"] Feb 02 18:15:05 crc kubenswrapper[4835]: I0202 18:15:05.199528 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8593a4fc-619d-4fce-810a-252425f1629c" path="/var/lib/kubelet/pods/8593a4fc-619d-4fce-810a-252425f1629c/volumes" Feb 02 18:15:09 crc kubenswrapper[4835]: I0202 18:15:09.927325 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/util/0.log" Feb 02 18:15:10 crc kubenswrapper[4835]: I0202 18:15:10.180944 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/pull/0.log" Feb 02 18:15:10 crc kubenswrapper[4835]: I0202 18:15:10.201470 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/util/0.log" Feb 02 18:15:10 crc kubenswrapper[4835]: I0202 18:15:10.274598 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/pull/0.log" Feb 02 18:15:10 crc kubenswrapper[4835]: I0202 18:15:10.522981 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/pull/0.log" Feb 02 18:15:10 crc kubenswrapper[4835]: I0202 18:15:10.535319 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/util/0.log" Feb 02 18:15:10 crc kubenswrapper[4835]: I0202 18:15:10.558268 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ed51ae67d3d61c0ef0070bbe487755130370507cd94cfabcb0a83ddb2qr5sd_dbcee8eb-eae6-490a-be35-8b24fef3ed83/extract/0.log" Feb 02 18:15:10 crc kubenswrapper[4835]: I0202 18:15:10.907703 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-fc589b45f-wlvlw_ccf3b51e-9298-4a5e-ad19-feac0a171056/manager/0.log" Feb 02 18:15:11 crc kubenswrapper[4835]: I0202 18:15:11.195749 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-8f4c5cb64-2smkw_2fa52615-07a4-47bc-8a7c-62565638964e/manager/0.log" Feb 02 18:15:11 crc kubenswrapper[4835]: I0202 18:15:11.379000 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5d77f4dbc9-l2d9w_051510bb-9754-4866-932d-53e8f209af3e/manager/0.log" Feb 02 18:15:11 crc kubenswrapper[4835]: I0202 18:15:11.511638 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-65dc6c8d9c-sf4fj_8d738981-de82-4d01-a295-b14401942841/manager/0.log" Feb 02 18:15:11 crc kubenswrapper[4835]: I0202 18:15:11.697673 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-dpqkl_bfabfeb6-c3aa-4684-8a0a-c53b92a3a8cf/manager/0.log" Feb 02 18:15:11 crc kubenswrapper[4835]: I0202 18:15:11.908131 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:15:11 crc kubenswrapper[4835]: I0202 18:15:11.957128 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:15:12 crc kubenswrapper[4835]: I0202 18:15:12.112260 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-87bd9d46f-5bvq9_668fc23c-0c08-4f7e-839d-6fbcf5f6554d/manager/0.log" Feb 02 18:15:12 crc kubenswrapper[4835]: I0202 18:15:12.164815 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lhdsm"] Feb 02 18:15:12 crc kubenswrapper[4835]: I0202 18:15:12.422834 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-xx8fb_cb34a8e8-0047-450d-898b-56164cd6f8c3/manager/0.log" Feb 02 18:15:12 crc kubenswrapper[4835]: I0202 18:15:12.562285 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-64469b487f-xdk9w_487c0b98-8b52-47fd-84ff-6637b6d79c8c/manager/0.log" Feb 02 18:15:12 crc kubenswrapper[4835]: I0202 18:15:12.759366 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7775d87d9d-knb4g_60282c99-48f4-4c72-92d2-c92b6720bcf7/manager/0.log" Feb 02 18:15:12 crc kubenswrapper[4835]: I0202 18:15:12.823661 4835 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-5968f45b79-bhwd9_867a5e63-f2c8-45fe-a65a-a8c3d11de2b3/manager/0.log" Feb 02 18:15:12 crc kubenswrapper[4835]: I0202 18:15:12.958594 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-mt62w_9225b13b-9f7b-4e74-8fb2-1cdf6a3a7ce2/manager/0.log" Feb 02 18:15:13 crc kubenswrapper[4835]: I0202 18:15:13.249214 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-576995988b-pd7lc_a2b75f19-bcbe-4f09-9652-70f042d4bc29/manager/0.log" Feb 02 18:15:13 crc kubenswrapper[4835]: I0202 18:15:13.468614 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5644b66645-89lbp_ac2c47dc-967c-456e-affc-bb3c4ac5b6d0/manager/0.log" Feb 02 18:15:13 crc kubenswrapper[4835]: I0202 18:15:13.608207 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lhdsm" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="registry-server" containerID="cri-o://907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a" gracePeriod=2 Feb 02 18:15:13 crc kubenswrapper[4835]: I0202 18:15:13.714311 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4dqmknq_eee7ce8b-cbaf-48ff-80d8-92011b4a11fa/manager/0.log" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.183200 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-6d857fbf88-k28lf_2f7d609f-2d42-4252-912a-ccae13d46f7f/operator/0.log" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.192298 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.296012 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxvqs\" (UniqueName: \"kubernetes.io/projected/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-kube-api-access-fxvqs\") pod \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.296134 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-utilities\") pod \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.296213 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-catalog-content\") pod \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\" (UID: \"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf\") " Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.297621 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-utilities" (OuterVolumeSpecName: "utilities") pod "2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" (UID: "2c6595c9-63e7-4395-9aaa-d0db09cf2cbf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.321025 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-kube-api-access-fxvqs" (OuterVolumeSpecName: "kube-api-access-fxvqs") pod "2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" (UID: "2c6595c9-63e7-4395-9aaa-d0db09cf2cbf"). InnerVolumeSpecName "kube-api-access-fxvqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.398665 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxvqs\" (UniqueName: \"kubernetes.io/projected/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-kube-api-access-fxvqs\") on node \"crc\" DevicePath \"\"" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.398694 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.438607 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" (UID: "2c6595c9-63e7-4395-9aaa-d0db09cf2cbf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.498813 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-chdtk_4a0bb1dd-84ba-4d22-812d-b76e81c5b054/registry-server/0.log" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.499951 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.622337 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lhdsm" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.622193 4835 generic.go:334] "Generic (PLEG): container finished" podID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerID="907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a" exitCode=0 Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.622940 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdsm" event={"ID":"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf","Type":"ContainerDied","Data":"907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a"} Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.622981 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdsm" event={"ID":"2c6595c9-63e7-4395-9aaa-d0db09cf2cbf","Type":"ContainerDied","Data":"437aab51cf856631b1fcbd7889242ef76fe6a375c65f5b4540501dec7f42c248"} Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.623004 4835 scope.go:117] "RemoveContainer" containerID="907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.659554 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-ccmdf_3b504454-3ebc-45b8-8e93-fcab1363ce3c/manager/0.log" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.710002 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lhdsm"] Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.710011 4835 scope.go:117] "RemoveContainer" containerID="ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.757819 4835 scope.go:117] "RemoveContainer" containerID="2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.757875 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lhdsm"] Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.795089 4835 scope.go:117] "RemoveContainer" containerID="907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a" Feb 02 18:15:14 crc kubenswrapper[4835]: E0202 18:15:14.795632 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a\": container with ID starting with 907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a not found: ID does not exist" containerID="907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.796195 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a"} err="failed to get container status \"907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a\": rpc error: code = NotFound desc = could not find container \"907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a\": container with ID starting with 907cf41b0aec05133a2a47996ad48fc8a3938d7f01a2cdbd2482cf9fd559aa7a not found: ID does not exist" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.796236 4835 scope.go:117] "RemoveContainer" containerID="ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad" Feb 02 18:15:14 crc kubenswrapper[4835]: 
E0202 18:15:14.796763 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad\": container with ID starting with ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad not found: ID does not exist" containerID="ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.796907 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad"} err="failed to get container status \"ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad\": rpc error: code = NotFound desc = could not find container \"ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad\": container with ID starting with ec633a3c96e6b77b3304e966b0261b05f198ac0f66d6d7696e3268124bdaabad not found: ID does not exist" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.796943 4835 scope.go:117] "RemoveContainer" containerID="2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568" Feb 02 18:15:14 crc kubenswrapper[4835]: E0202 18:15:14.797859 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568\": container with ID starting with 2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568 not found: ID does not exist" containerID="2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.797905 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568"} err="failed to get container status \"2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568\": rpc error: code = NotFound desc = could not find container \"2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568\": container with ID starting with 2e155d9b296af6ec072fe6e73b7d68ccda4959aa151193ff2cb60e0e1d093568 not found: ID does not exist" Feb 02 18:15:14 crc kubenswrapper[4835]: I0202 18:15:14.844752 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-kjlbl_af6dd5c2-faa4-407d-b6bc-fffda146240b/manager/0.log" Feb 02 18:15:15 crc kubenswrapper[4835]: I0202 18:15:15.040030 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-wdspf_dc9b2536-2284-4bd8-b803-e6dc90e30016/operator/0.log" Feb 02 18:15:15 crc kubenswrapper[4835]: I0202 18:15:15.206892 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" path="/var/lib/kubelet/pods/2c6595c9-63e7-4395-9aaa-d0db09cf2cbf/volumes" Feb 02 18:15:15 crc kubenswrapper[4835]: I0202 18:15:15.220944 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7b89fdf75b-lrq25_563720e1-311a-4aea-b34b-e6ab1d5d7f44/manager/0.log" Feb 02 18:15:15 crc kubenswrapper[4835]: I0202 18:15:15.438501 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-565849b54-r2xwk_affef4c6-1369-40e7-882d-e0cc06c7a492/manager/0.log" Feb 02 18:15:15 crc kubenswrapper[4835]: 
I0202 18:15:15.538930 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-2klm7_eb7ea012-63e3-4108-bb3b-904fd21a7c4c/manager/0.log" Feb 02 18:15:15 crc kubenswrapper[4835]: I0202 18:15:15.616409 4835 scope.go:117] "RemoveContainer" containerID="c6dbeef34c7bdf76c73715f62c3cb20da06bd3cd13e02afd60d0e37199aa80b9" Feb 02 18:15:15 crc kubenswrapper[4835]: I0202 18:15:15.741542 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-586b95b788-rrg8c_83fe7277-43df-4e53-b2e1-20ec1c340289/manager/0.log" Feb 02 18:15:16 crc kubenswrapper[4835]: I0202 18:15:16.151971 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7777b795b7-9mpk4_5166e3f9-91d3-4a6a-a4af-68e5063aa217/manager/0.log" Feb 02 18:15:16 crc kubenswrapper[4835]: I0202 18:15:16.205876 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b89ddb58-vl7xb_c64313f5-c2dc-4a80-aee6-4c177172598f/manager/0.log" Feb 02 18:15:40 crc kubenswrapper[4835]: I0202 18:15:40.385075 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-txgvr_acabb9c6-7abf-4e6d-bf8c-6ac5b01eb12f/control-plane-machine-set-operator/0.log" Feb 02 18:15:40 crc kubenswrapper[4835]: I0202 18:15:40.410736 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-44vfm_67a8f4cf-ff9c-48ab-92dd-b2e096ab4192/kube-rbac-proxy/0.log" Feb 02 18:15:40 crc kubenswrapper[4835]: I0202 18:15:40.608803 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-44vfm_67a8f4cf-ff9c-48ab-92dd-b2e096ab4192/machine-api-operator/0.log" Feb 02 18:15:55 crc kubenswrapper[4835]: I0202 18:15:55.013542 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-rvjmd_65887296-1b4f-40f4-80f1-9889e34070cc/cert-manager-controller/0.log" Feb 02 18:15:55 crc kubenswrapper[4835]: I0202 18:15:55.161397 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-h5kzj_2d062425-7c9e-48fe-a566-bf101b0349cc/cert-manager-cainjector/0.log" Feb 02 18:15:55 crc kubenswrapper[4835]: I0202 18:15:55.242529 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-jjmjw_1f888664-2f9b-4bd3-bef9-dd8b65a2ab93/cert-manager-webhook/0.log" Feb 02 18:16:10 crc kubenswrapper[4835]: I0202 18:16:10.524755 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-r4xs8_3d3a7f96-7388-4e16-991c-6e99de2387dc/nmstate-console-plugin/0.log" Feb 02 18:16:11 crc kubenswrapper[4835]: I0202 18:16:11.229803 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-9bwnb_b5dbad86-74ef-402c-b0ab-5b48d69e8ecc/nmstate-handler/0.log" Feb 02 18:16:11 crc kubenswrapper[4835]: I0202 18:16:11.232156 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-c5rjp_c615d857-c500-4fe2-b699-97a5d8ce3311/kube-rbac-proxy/0.log" Feb 02 18:16:11 crc kubenswrapper[4835]: I0202 18:16:11.258978 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-c5rjp_c615d857-c500-4fe2-b699-97a5d8ce3311/nmstate-metrics/0.log" Feb 02 18:16:11 crc kubenswrapper[4835]: I0202 18:16:11.399001 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-ck9dl_8abbe167-63ff-48da-ad70-f298a68aab19/nmstate-operator/0.log" Feb 02 18:16:11 crc kubenswrapper[4835]: I0202 18:16:11.459222 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-44zt5_9b89aa5a-f847-42b1-a763-3bdcbcde8158/nmstate-webhook/0.log" Feb 02 18:16:39 crc kubenswrapper[4835]: I0202 18:16:39.394076 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-dsndw_0390332b-c0b4-4a28-b815-69ad9d9bed13/kube-rbac-proxy/0.log" Feb 02 18:16:39 crc kubenswrapper[4835]: I0202 18:16:39.562194 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-dsndw_0390332b-c0b4-4a28-b815-69ad9d9bed13/controller/0.log" Feb 02 18:16:39 crc kubenswrapper[4835]: I0202 18:16:39.670773 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-66lj9_8a3b28b3-6b7a-4f64-b4d0-3af57e2b436e/frr-k8s-webhook-server/0.log" Feb 02 18:16:39 crc kubenswrapper[4835]: I0202 18:16:39.769786 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-frr-files/0.log" Feb 02 18:16:39 crc kubenswrapper[4835]: I0202 18:16:39.914400 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-reloader/0.log" Feb 02 18:16:39 crc kubenswrapper[4835]: I0202 18:16:39.958763 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-metrics/0.log" Feb 02 18:16:39 crc kubenswrapper[4835]: I0202 18:16:39.961427 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-frr-files/0.log" Feb 02 18:16:39 crc kubenswrapper[4835]: I0202 18:16:39.999049 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-reloader/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.189153 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-frr-files/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.231167 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-reloader/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.231541 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-metrics/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.245175 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-metrics/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.393756 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-frr-files/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.424728 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-reloader/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.479848 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/cp-metrics/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.492368 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/controller/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.644608 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/frr-metrics/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.689533 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/kube-rbac-proxy/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.704146 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/kube-rbac-proxy-frr/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.921202 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/reloader/0.log" Feb 02 18:16:40 crc kubenswrapper[4835]: I0202 18:16:40.953507 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-b7878cd68-s889h_846128ff-a92d-40b9-835b-3184cb35de48/manager/0.log" Feb 02 18:16:41 crc kubenswrapper[4835]: I0202 18:16:41.162902 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5f9664b4df-s2qs9_5c1b1958-8e0e-40bd-9325-b7e6e4aa4a73/webhook-server/0.log" Feb 02 18:16:41 crc kubenswrapper[4835]: I0202 18:16:41.537546 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-glqbw_9b37cd70-fe2e-406b-a1f2-5aade78f75e4/kube-rbac-proxy/0.log" Feb 02 18:16:41 crc kubenswrapper[4835]: I0202 18:16:41.920409 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-glqbw_9b37cd70-fe2e-406b-a1f2-5aade78f75e4/speaker/0.log" Feb 02 18:16:42 crc kubenswrapper[4835]: I0202 18:16:42.436782 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xfhbb_9308a217-4e09-4f60-a7d8-698cde044a53/frr/0.log" Feb 02 18:16:55 crc kubenswrapper[4835]: I0202 18:16:55.211200 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/util/0.log" Feb 02 18:16:55 crc kubenswrapper[4835]: I0202 18:16:55.351825 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/util/0.log" Feb 02 18:16:55 crc kubenswrapper[4835]: I0202 18:16:55.417602 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/pull/0.log" Feb 02 18:16:55 crc kubenswrapper[4835]: I0202 18:16:55.443884 4835 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/pull/0.log" Feb 02 18:16:55 crc kubenswrapper[4835]: I0202 18:16:55.642584 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/util/0.log" Feb 02 18:16:55 crc kubenswrapper[4835]: I0202 18:16:55.648531 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/pull/0.log" Feb 02 18:16:55 crc kubenswrapper[4835]: I0202 18:16:55.655043 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dctsnll_695aef87-f06b-45ea-a3c1-aadf175760b5/extract/0.log" Feb 02 18:16:55 crc kubenswrapper[4835]: I0202 18:16:55.834991 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/util/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.030939 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/pull/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.033837 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/util/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.061585 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/pull/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.185728 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/util/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.223406 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/pull/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.241101 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713fpkcc_3d697674-78a8-4c19-96a7-5aea46402c5e/extract/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.392794 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-utilities/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.555347 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-content/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.565525 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-content/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.586133 
4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-utilities/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.769107 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-content/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.805591 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/extract-utilities/0.log" Feb 02 18:16:56 crc kubenswrapper[4835]: I0202 18:16:56.969387 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-utilities/0.log" Feb 02 18:16:57 crc kubenswrapper[4835]: I0202 18:16:57.238209 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-utilities/0.log" Feb 02 18:16:57 crc kubenswrapper[4835]: I0202 18:16:57.342949 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-content/0.log" Feb 02 18:16:57 crc kubenswrapper[4835]: I0202 18:16:57.388727 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-content/0.log" Feb 02 18:16:57 crc kubenswrapper[4835]: I0202 18:16:57.577440 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4lbw6_a48095d7-2712-4ce3-ac7e-0fb66f641e9f/registry-server/0.log" Feb 02 18:16:57 crc kubenswrapper[4835]: I0202 18:16:57.709582 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-utilities/0.log" Feb 02 18:16:57 crc kubenswrapper[4835]: I0202 18:16:57.727298 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/extract-content/0.log" Feb 02 18:16:57 crc kubenswrapper[4835]: I0202 18:16:57.917663 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-lmjmd_624437d3-bcc0-40bc-bc25-d8876722dbc8/marketplace-operator/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.117165 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-utilities/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.362853 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-utilities/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.369653 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-content/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.419407 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-content/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.474459 
4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jssq8_66552ba5-2809-433e-b245-bc22ace6c699/registry-server/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.608747 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-utilities/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.608747 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/extract-content/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.813840 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-4mqtb_6f0070dd-ec0f-4bf2-b983-e2efe4c14a3a/registry-server/0.log" Feb 02 18:16:58 crc kubenswrapper[4835]: I0202 18:16:58.861210 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-utilities/0.log" Feb 02 18:16:59 crc kubenswrapper[4835]: I0202 18:16:59.526676 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-utilities/0.log" Feb 02 18:16:59 crc kubenswrapper[4835]: I0202 18:16:59.637721 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-content/0.log" Feb 02 18:16:59 crc kubenswrapper[4835]: I0202 18:16:59.647634 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-content/0.log" Feb 02 18:16:59 crc kubenswrapper[4835]: I0202 18:16:59.799715 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-utilities/0.log" Feb 02 18:16:59 crc kubenswrapper[4835]: I0202 18:16:59.800495 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/extract-content/0.log" Feb 02 18:17:00 crc kubenswrapper[4835]: I0202 18:17:00.507149 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xshph_2ca5614b-4fa3-4a18-b40f-64369990a74a/registry-server/0.log" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.861781 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jdq99"] Feb 02 18:17:01 crc kubenswrapper[4835]: E0202 18:17:01.862600 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e05d608a-9d3c-48c6-8aa7-8944bea12ff7" containerName="collect-profiles" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.862616 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="e05d608a-9d3c-48c6-8aa7-8944bea12ff7" containerName="collect-profiles" Feb 02 18:17:01 crc kubenswrapper[4835]: E0202 18:17:01.862642 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="registry-server" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.862650 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="registry-server" Feb 02 18:17:01 crc kubenswrapper[4835]: E0202 18:17:01.862673 4835 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="extract-content" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.862681 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="extract-content" Feb 02 18:17:01 crc kubenswrapper[4835]: E0202 18:17:01.862707 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="extract-utilities" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.862714 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="extract-utilities" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.863155 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c6595c9-63e7-4395-9aaa-d0db09cf2cbf" containerName="registry-server" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.863181 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="e05d608a-9d3c-48c6-8aa7-8944bea12ff7" containerName="collect-profiles" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.866515 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:01 crc kubenswrapper[4835]: I0202 18:17:01.881959 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jdq99"] Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.051703 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-catalog-content\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.052023 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-utilities\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.052081 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbxr9\" (UniqueName: \"kubernetes.io/projected/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-kube-api-access-sbxr9\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.154583 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-catalog-content\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.154645 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-utilities\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc 
kubenswrapper[4835]: I0202 18:17:02.154725 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbxr9\" (UniqueName: \"kubernetes.io/projected/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-kube-api-access-sbxr9\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.155223 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-catalog-content\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.155256 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-utilities\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.178168 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbxr9\" (UniqueName: \"kubernetes.io/projected/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-kube-api-access-sbxr9\") pod \"community-operators-jdq99\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.194820 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:02 crc kubenswrapper[4835]: I0202 18:17:02.712767 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jdq99"] Feb 02 18:17:03 crc kubenswrapper[4835]: I0202 18:17:03.644110 4835 generic.go:334] "Generic (PLEG): container finished" podID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerID="34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c" exitCode=0 Feb 02 18:17:03 crc kubenswrapper[4835]: I0202 18:17:03.644254 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jdq99" event={"ID":"6169c0c8-66ee-4d19-9ebd-5f3016f9e227","Type":"ContainerDied","Data":"34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c"} Feb 02 18:17:03 crc kubenswrapper[4835]: I0202 18:17:03.644515 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jdq99" event={"ID":"6169c0c8-66ee-4d19-9ebd-5f3016f9e227","Type":"ContainerStarted","Data":"97455578f1ebb5af74175f3a27bb4e6587e9843d9a88d3b244aeef0c83ccdf77"} Feb 02 18:17:04 crc kubenswrapper[4835]: I0202 18:17:04.654629 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jdq99" event={"ID":"6169c0c8-66ee-4d19-9ebd-5f3016f9e227","Type":"ContainerStarted","Data":"437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06"} Feb 02 18:17:06 crc kubenswrapper[4835]: I0202 18:17:06.680749 4835 generic.go:334] "Generic (PLEG): container finished" podID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerID="437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06" exitCode=0 Feb 02 18:17:06 crc kubenswrapper[4835]: I0202 18:17:06.680832 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-jdq99" event={"ID":"6169c0c8-66ee-4d19-9ebd-5f3016f9e227","Type":"ContainerDied","Data":"437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06"} Feb 02 18:17:07 crc kubenswrapper[4835]: I0202 18:17:07.689509 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jdq99" event={"ID":"6169c0c8-66ee-4d19-9ebd-5f3016f9e227","Type":"ContainerStarted","Data":"2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c"} Feb 02 18:17:07 crc kubenswrapper[4835]: I0202 18:17:07.715922 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jdq99" podStartSLOduration=3.249536825 podStartE2EDuration="6.715900251s" podCreationTimestamp="2026-02-02 18:17:01 +0000 UTC" firstStartedPulling="2026-02-02 18:17:03.64596622 +0000 UTC m=+5215.267570320" lastFinishedPulling="2026-02-02 18:17:07.112329666 +0000 UTC m=+5218.733933746" observedRunningTime="2026-02-02 18:17:07.710320703 +0000 UTC m=+5219.331924783" watchObservedRunningTime="2026-02-02 18:17:07.715900251 +0000 UTC m=+5219.337504331" Feb 02 18:17:12 crc kubenswrapper[4835]: I0202 18:17:12.195650 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:12 crc kubenswrapper[4835]: I0202 18:17:12.196202 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:12 crc kubenswrapper[4835]: I0202 18:17:12.252882 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:12 crc kubenswrapper[4835]: I0202 18:17:12.792574 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:12 crc kubenswrapper[4835]: I0202 18:17:12.842885 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jdq99"] Feb 02 18:17:14 crc kubenswrapper[4835]: I0202 18:17:14.760905 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jdq99" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerName="registry-server" containerID="cri-o://2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c" gracePeriod=2 Feb 02 18:17:14 crc kubenswrapper[4835]: I0202 18:17:14.870423 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:17:14 crc kubenswrapper[4835]: I0202 18:17:14.870723 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:17:15 crc kubenswrapper[4835]: E0202 18:17:15.159513 4835 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.245:47462->38.102.83.245:45039: read tcp 38.102.83.245:47462->38.102.83.245:45039: read: connection reset by peer Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.236453 4835 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.327694 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbxr9\" (UniqueName: \"kubernetes.io/projected/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-kube-api-access-sbxr9\") pod \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.327987 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-utilities\") pod \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.328035 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-catalog-content\") pod \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\" (UID: \"6169c0c8-66ee-4d19-9ebd-5f3016f9e227\") " Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.334298 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-kube-api-access-sbxr9" (OuterVolumeSpecName: "kube-api-access-sbxr9") pod "6169c0c8-66ee-4d19-9ebd-5f3016f9e227" (UID: "6169c0c8-66ee-4d19-9ebd-5f3016f9e227"). InnerVolumeSpecName "kube-api-access-sbxr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.335744 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-utilities" (OuterVolumeSpecName: "utilities") pod "6169c0c8-66ee-4d19-9ebd-5f3016f9e227" (UID: "6169c0c8-66ee-4d19-9ebd-5f3016f9e227"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.432995 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.433028 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbxr9\" (UniqueName: \"kubernetes.io/projected/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-kube-api-access-sbxr9\") on node \"crc\" DevicePath \"\"" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.575314 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6169c0c8-66ee-4d19-9ebd-5f3016f9e227" (UID: "6169c0c8-66ee-4d19-9ebd-5f3016f9e227"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.636580 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6169c0c8-66ee-4d19-9ebd-5f3016f9e227-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.775804 4835 generic.go:334] "Generic (PLEG): container finished" podID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerID="2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c" exitCode=0 Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.775862 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jdq99" event={"ID":"6169c0c8-66ee-4d19-9ebd-5f3016f9e227","Type":"ContainerDied","Data":"2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c"} Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.775896 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jdq99" event={"ID":"6169c0c8-66ee-4d19-9ebd-5f3016f9e227","Type":"ContainerDied","Data":"97455578f1ebb5af74175f3a27bb4e6587e9843d9a88d3b244aeef0c83ccdf77"} Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.775926 4835 scope.go:117] "RemoveContainer" containerID="2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.776104 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jdq99" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.811992 4835 scope.go:117] "RemoveContainer" containerID="437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.831457 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jdq99"] Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.850198 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jdq99"] Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.897084 4835 scope.go:117] "RemoveContainer" containerID="34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.914430 4835 scope.go:117] "RemoveContainer" containerID="2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c" Feb 02 18:17:15 crc kubenswrapper[4835]: E0202 18:17:15.921809 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c\": container with ID starting with 2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c not found: ID does not exist" containerID="2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.922296 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c"} err="failed to get container status \"2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c\": rpc error: code = NotFound desc = could not find container \"2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c\": container with ID starting with 2f958bcb2e04a403876d3cadc8594096568ab71094793baff5423957f873379c not found: ID does not exist" Feb 02 
18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.922332 4835 scope.go:117] "RemoveContainer" containerID="437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06" Feb 02 18:17:15 crc kubenswrapper[4835]: E0202 18:17:15.925590 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06\": container with ID starting with 437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06 not found: ID does not exist" containerID="437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.925665 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06"} err="failed to get container status \"437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06\": rpc error: code = NotFound desc = could not find container \"437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06\": container with ID starting with 437e8a3bb3c1b923409787f1bc0813eb0e4e61ccd0dab526feb8e5b09432ec06 not found: ID does not exist" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.925699 4835 scope.go:117] "RemoveContainer" containerID="34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c" Feb 02 18:17:15 crc kubenswrapper[4835]: E0202 18:17:15.929877 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c\": container with ID starting with 34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c not found: ID does not exist" containerID="34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c" Feb 02 18:17:15 crc kubenswrapper[4835]: I0202 18:17:15.929925 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c"} err="failed to get container status \"34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c\": rpc error: code = NotFound desc = could not find container \"34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c\": container with ID starting with 34211c5402d8a759cbd7c64c87172c0476e9dc2c68a78e33f44f08b6a4a7975c not found: ID does not exist" Feb 02 18:17:17 crc kubenswrapper[4835]: I0202 18:17:17.199095 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" path="/var/lib/kubelet/pods/6169c0c8-66ee-4d19-9ebd-5f3016f9e227/volumes" Feb 02 18:17:29 crc kubenswrapper[4835]: E0202 18:17:29.290327 4835 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.245:50156->38.102.83.245:45039: read tcp 38.102.83.245:50156->38.102.83.245:45039: read: connection reset by peer Feb 02 18:17:44 crc kubenswrapper[4835]: I0202 18:17:44.870697 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:17:44 crc kubenswrapper[4835]: I0202 18:17:44.871300 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" 
podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.589887 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rj5bs"] Feb 02 18:18:05 crc kubenswrapper[4835]: E0202 18:18:05.590960 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerName="extract-content" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.591009 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerName="extract-content" Feb 02 18:18:05 crc kubenswrapper[4835]: E0202 18:18:05.591042 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerName="extract-utilities" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.591050 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerName="extract-utilities" Feb 02 18:18:05 crc kubenswrapper[4835]: E0202 18:18:05.591072 4835 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerName="registry-server" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.591088 4835 state_mem.go:107] "Deleted CPUSet assignment" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerName="registry-server" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.591361 4835 memory_manager.go:354] "RemoveStaleState removing state" podUID="6169c0c8-66ee-4d19-9ebd-5f3016f9e227" containerName="registry-server" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.592998 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.602734 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rj5bs"] Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.738155 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-utilities\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.738783 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pcvk\" (UniqueName: \"kubernetes.io/projected/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-kube-api-access-5pcvk\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.739151 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-catalog-content\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.841099 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-catalog-content\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.841324 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-utilities\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.841361 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pcvk\" (UniqueName: \"kubernetes.io/projected/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-kube-api-access-5pcvk\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.841932 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-catalog-content\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.842016 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-utilities\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.863562 4835 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5pcvk\" (UniqueName: \"kubernetes.io/projected/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-kube-api-access-5pcvk\") pod \"certified-operators-rj5bs\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:05 crc kubenswrapper[4835]: I0202 18:18:05.954994 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:06 crc kubenswrapper[4835]: I0202 18:18:06.506512 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rj5bs"] Feb 02 18:18:07 crc kubenswrapper[4835]: I0202 18:18:07.269690 4835 generic.go:334] "Generic (PLEG): container finished" podID="fb90c3ac-10ca-4d6d-9c3b-9130501c80ab" containerID="8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4" exitCode=0 Feb 02 18:18:07 crc kubenswrapper[4835]: I0202 18:18:07.269787 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rj5bs" event={"ID":"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab","Type":"ContainerDied","Data":"8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4"} Feb 02 18:18:07 crc kubenswrapper[4835]: I0202 18:18:07.270086 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rj5bs" event={"ID":"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab","Type":"ContainerStarted","Data":"d01e5a183937e03d39c0216b5c5ead5c13c19dda13b0d6094d410e51f5f924cc"} Feb 02 18:18:08 crc kubenswrapper[4835]: I0202 18:18:08.279646 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rj5bs" event={"ID":"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab","Type":"ContainerStarted","Data":"23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770"} Feb 02 18:18:10 crc kubenswrapper[4835]: I0202 18:18:10.306913 4835 generic.go:334] "Generic (PLEG): container finished" podID="fb90c3ac-10ca-4d6d-9c3b-9130501c80ab" containerID="23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770" exitCode=0 Feb 02 18:18:10 crc kubenswrapper[4835]: I0202 18:18:10.307143 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rj5bs" event={"ID":"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab","Type":"ContainerDied","Data":"23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770"} Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.174421 4835 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4nrh8"] Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.188821 4835 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.221470 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4nrh8"] Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.292087 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-catalog-content\") pod \"redhat-marketplace-4nrh8\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.292139 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-utilities\") pod \"redhat-marketplace-4nrh8\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.292306 4835 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvz5z\" (UniqueName: \"kubernetes.io/projected/634a455a-986c-4d77-b23b-ab29919db557-kube-api-access-nvz5z\") pod \"redhat-marketplace-4nrh8\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.318999 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rj5bs" event={"ID":"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab","Type":"ContainerStarted","Data":"7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307"} Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.342232 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rj5bs" podStartSLOduration=2.66001977 podStartE2EDuration="6.342210379s" podCreationTimestamp="2026-02-02 18:18:05 +0000 UTC" firstStartedPulling="2026-02-02 18:18:07.273058192 +0000 UTC m=+5278.894662272" lastFinishedPulling="2026-02-02 18:18:10.955248811 +0000 UTC m=+5282.576852881" observedRunningTime="2026-02-02 18:18:11.337375512 +0000 UTC m=+5282.958979592" watchObservedRunningTime="2026-02-02 18:18:11.342210379 +0000 UTC m=+5282.963814459" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.393998 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-catalog-content\") pod \"redhat-marketplace-4nrh8\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.394077 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-utilities\") pod \"redhat-marketplace-4nrh8\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.394227 4835 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvz5z\" (UniqueName: \"kubernetes.io/projected/634a455a-986c-4d77-b23b-ab29919db557-kube-api-access-nvz5z\") pod \"redhat-marketplace-4nrh8\" (UID: 
\"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.395206 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-catalog-content\") pod \"redhat-marketplace-4nrh8\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.396361 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-utilities\") pod \"redhat-marketplace-4nrh8\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.423927 4835 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvz5z\" (UniqueName: \"kubernetes.io/projected/634a455a-986c-4d77-b23b-ab29919db557-kube-api-access-nvz5z\") pod \"redhat-marketplace-4nrh8\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:11 crc kubenswrapper[4835]: I0202 18:18:11.519533 4835 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:12 crc kubenswrapper[4835]: I0202 18:18:12.054102 4835 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4nrh8"] Feb 02 18:18:12 crc kubenswrapper[4835]: W0202 18:18:12.064440 4835 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod634a455a_986c_4d77_b23b_ab29919db557.slice/crio-5893f8cc981ce54984929be23cf3d8a2c75d1cc58484ed0a8f1d94916019e2d4 WatchSource:0}: Error finding container 5893f8cc981ce54984929be23cf3d8a2c75d1cc58484ed0a8f1d94916019e2d4: Status 404 returned error can't find the container with id 5893f8cc981ce54984929be23cf3d8a2c75d1cc58484ed0a8f1d94916019e2d4 Feb 02 18:18:12 crc kubenswrapper[4835]: I0202 18:18:12.328552 4835 generic.go:334] "Generic (PLEG): container finished" podID="634a455a-986c-4d77-b23b-ab29919db557" containerID="77ee008ce23f1a97bac946586937a4d98b63326de226ffee86c99a19fda71f5c" exitCode=0 Feb 02 18:18:12 crc kubenswrapper[4835]: I0202 18:18:12.328650 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4nrh8" event={"ID":"634a455a-986c-4d77-b23b-ab29919db557","Type":"ContainerDied","Data":"77ee008ce23f1a97bac946586937a4d98b63326de226ffee86c99a19fda71f5c"} Feb 02 18:18:12 crc kubenswrapper[4835]: I0202 18:18:12.328854 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4nrh8" event={"ID":"634a455a-986c-4d77-b23b-ab29919db557","Type":"ContainerStarted","Data":"5893f8cc981ce54984929be23cf3d8a2c75d1cc58484ed0a8f1d94916019e2d4"} Feb 02 18:18:13 crc kubenswrapper[4835]: I0202 18:18:13.341081 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4nrh8" event={"ID":"634a455a-986c-4d77-b23b-ab29919db557","Type":"ContainerStarted","Data":"b74c7a34fa8b9f5e6fa840c7227987d81c2e21910ba60a586c89703188330809"} Feb 02 18:18:14 crc kubenswrapper[4835]: I0202 18:18:14.351921 4835 generic.go:334] "Generic (PLEG): container finished" podID="634a455a-986c-4d77-b23b-ab29919db557" 
containerID="b74c7a34fa8b9f5e6fa840c7227987d81c2e21910ba60a586c89703188330809" exitCode=0 Feb 02 18:18:14 crc kubenswrapper[4835]: I0202 18:18:14.351980 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4nrh8" event={"ID":"634a455a-986c-4d77-b23b-ab29919db557","Type":"ContainerDied","Data":"b74c7a34fa8b9f5e6fa840c7227987d81c2e21910ba60a586c89703188330809"} Feb 02 18:18:14 crc kubenswrapper[4835]: I0202 18:18:14.870519 4835 patch_prober.go:28] interesting pod/machine-config-daemon-94jlf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 18:18:14 crc kubenswrapper[4835]: I0202 18:18:14.870814 4835 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 18:18:14 crc kubenswrapper[4835]: I0202 18:18:14.870875 4835 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" Feb 02 18:18:14 crc kubenswrapper[4835]: I0202 18:18:14.871407 4835 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad"} pod="openshift-machine-config-operator/machine-config-daemon-94jlf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 18:18:14 crc kubenswrapper[4835]: I0202 18:18:14.871459 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerName="machine-config-daemon" containerID="cri-o://a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" gracePeriod=600 Feb 02 18:18:14 crc kubenswrapper[4835]: E0202 18:18:14.991957 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:18:15 crc kubenswrapper[4835]: I0202 18:18:15.372075 4835 generic.go:334] "Generic (PLEG): container finished" podID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" exitCode=0 Feb 02 18:18:15 crc kubenswrapper[4835]: I0202 18:18:15.372132 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerDied","Data":"a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad"} Feb 02 18:18:15 crc kubenswrapper[4835]: I0202 18:18:15.372453 4835 scope.go:117] "RemoveContainer" containerID="6de61775b6dfa0919621a117267aee27e136e485973852f07aa114fdff41e432" Feb 02 18:18:15 crc kubenswrapper[4835]: I0202 18:18:15.373503 4835 scope.go:117] "RemoveContainer" 
containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:18:15 crc kubenswrapper[4835]: E0202 18:18:15.373955 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:18:15 crc kubenswrapper[4835]: I0202 18:18:15.389239 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4nrh8" event={"ID":"634a455a-986c-4d77-b23b-ab29919db557","Type":"ContainerStarted","Data":"3a7c243de7fc6810da876ac914606b5a1f27b47ce1a7f531d4db0bf28c33e87e"} Feb 02 18:18:15 crc kubenswrapper[4835]: I0202 18:18:15.441857 4835 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4nrh8" podStartSLOduration=1.919328068 podStartE2EDuration="4.441836972s" podCreationTimestamp="2026-02-02 18:18:11 +0000 UTC" firstStartedPulling="2026-02-02 18:18:12.340960687 +0000 UTC m=+5283.962564767" lastFinishedPulling="2026-02-02 18:18:14.863469591 +0000 UTC m=+5286.485073671" observedRunningTime="2026-02-02 18:18:15.417002068 +0000 UTC m=+5287.038606168" watchObservedRunningTime="2026-02-02 18:18:15.441836972 +0000 UTC m=+5287.063441052" Feb 02 18:18:15 crc kubenswrapper[4835]: I0202 18:18:15.955395 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:15 crc kubenswrapper[4835]: I0202 18:18:15.955452 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:16 crc kubenswrapper[4835]: I0202 18:18:16.002254 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:16 crc kubenswrapper[4835]: I0202 18:18:16.456730 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:18 crc kubenswrapper[4835]: I0202 18:18:18.364397 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rj5bs"] Feb 02 18:18:18 crc kubenswrapper[4835]: I0202 18:18:18.418533 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rj5bs" podUID="fb90c3ac-10ca-4d6d-9c3b-9130501c80ab" containerName="registry-server" containerID="cri-o://7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307" gracePeriod=2 Feb 02 18:18:18 crc kubenswrapper[4835]: I0202 18:18:18.902055 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.051845 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-catalog-content\") pod \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.051967 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pcvk\" (UniqueName: \"kubernetes.io/projected/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-kube-api-access-5pcvk\") pod \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.052028 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-utilities\") pod \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\" (UID: \"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab\") " Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.053025 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-utilities" (OuterVolumeSpecName: "utilities") pod "fb90c3ac-10ca-4d6d-9c3b-9130501c80ab" (UID: "fb90c3ac-10ca-4d6d-9c3b-9130501c80ab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.059317 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-kube-api-access-5pcvk" (OuterVolumeSpecName: "kube-api-access-5pcvk") pod "fb90c3ac-10ca-4d6d-9c3b-9130501c80ab" (UID: "fb90c3ac-10ca-4d6d-9c3b-9130501c80ab"). InnerVolumeSpecName "kube-api-access-5pcvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.120656 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb90c3ac-10ca-4d6d-9c3b-9130501c80ab" (UID: "fb90c3ac-10ca-4d6d-9c3b-9130501c80ab"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.154872 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.154911 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pcvk\" (UniqueName: \"kubernetes.io/projected/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-kube-api-access-5pcvk\") on node \"crc\" DevicePath \"\"" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.154945 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.440896 4835 generic.go:334] "Generic (PLEG): container finished" podID="fb90c3ac-10ca-4d6d-9c3b-9130501c80ab" containerID="7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307" exitCode=0 Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.440975 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rj5bs" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.440996 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rj5bs" event={"ID":"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab","Type":"ContainerDied","Data":"7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307"} Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.442038 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rj5bs" event={"ID":"fb90c3ac-10ca-4d6d-9c3b-9130501c80ab","Type":"ContainerDied","Data":"d01e5a183937e03d39c0216b5c5ead5c13c19dda13b0d6094d410e51f5f924cc"} Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.442059 4835 scope.go:117] "RemoveContainer" containerID="7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.472596 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rj5bs"] Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.473458 4835 scope.go:117] "RemoveContainer" containerID="23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.488375 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rj5bs"] Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.495651 4835 scope.go:117] "RemoveContainer" containerID="8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.549964 4835 scope.go:117] "RemoveContainer" containerID="7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307" Feb 02 18:18:19 crc kubenswrapper[4835]: E0202 18:18:19.550412 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307\": container with ID starting with 7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307 not found: ID does not exist" containerID="7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.550440 
4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307"} err="failed to get container status \"7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307\": rpc error: code = NotFound desc = could not find container \"7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307\": container with ID starting with 7d139109fc80f508db262f941414d8a246ab8dee8c01c2a420b52a1a9da06307 not found: ID does not exist" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.550482 4835 scope.go:117] "RemoveContainer" containerID="23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770" Feb 02 18:18:19 crc kubenswrapper[4835]: E0202 18:18:19.550711 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770\": container with ID starting with 23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770 not found: ID does not exist" containerID="23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.550734 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770"} err="failed to get container status \"23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770\": rpc error: code = NotFound desc = could not find container \"23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770\": container with ID starting with 23f3864d94fdee0339dba1e602f18be050df8d2ff00e0627f75811f24afd3770 not found: ID does not exist" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.550749 4835 scope.go:117] "RemoveContainer" containerID="8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4" Feb 02 18:18:19 crc kubenswrapper[4835]: E0202 18:18:19.550959 4835 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4\": container with ID starting with 8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4 not found: ID does not exist" containerID="8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4" Feb 02 18:18:19 crc kubenswrapper[4835]: I0202 18:18:19.550989 4835 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4"} err="failed to get container status \"8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4\": rpc error: code = NotFound desc = could not find container \"8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4\": container with ID starting with 8b9704ab53bf7ee99f8ede0b568e2fb2d09c953be1a21af38eb7f3aad71338b4 not found: ID does not exist" Feb 02 18:18:21 crc kubenswrapper[4835]: I0202 18:18:21.203921 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb90c3ac-10ca-4d6d-9c3b-9130501c80ab" path="/var/lib/kubelet/pods/fb90c3ac-10ca-4d6d-9c3b-9130501c80ab/volumes" Feb 02 18:18:21 crc kubenswrapper[4835]: I0202 18:18:21.520286 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:21 crc kubenswrapper[4835]: I0202 18:18:21.520572 4835 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:21 crc kubenswrapper[4835]: I0202 18:18:21.569216 4835 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:22 crc kubenswrapper[4835]: I0202 18:18:22.531786 4835 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:22 crc kubenswrapper[4835]: I0202 18:18:22.772911 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4nrh8"] Feb 02 18:18:24 crc kubenswrapper[4835]: I0202 18:18:24.491774 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4nrh8" podUID="634a455a-986c-4d77-b23b-ab29919db557" containerName="registry-server" containerID="cri-o://3a7c243de7fc6810da876ac914606b5a1f27b47ce1a7f531d4db0bf28c33e87e" gracePeriod=2 Feb 02 18:18:25 crc kubenswrapper[4835]: I0202 18:18:25.501747 4835 generic.go:334] "Generic (PLEG): container finished" podID="634a455a-986c-4d77-b23b-ab29919db557" containerID="3a7c243de7fc6810da876ac914606b5a1f27b47ce1a7f531d4db0bf28c33e87e" exitCode=0 Feb 02 18:18:25 crc kubenswrapper[4835]: I0202 18:18:25.501810 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4nrh8" event={"ID":"634a455a-986c-4d77-b23b-ab29919db557","Type":"ContainerDied","Data":"3a7c243de7fc6810da876ac914606b5a1f27b47ce1a7f531d4db0bf28c33e87e"} Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.228912 4835 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.307089 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvz5z\" (UniqueName: \"kubernetes.io/projected/634a455a-986c-4d77-b23b-ab29919db557-kube-api-access-nvz5z\") pod \"634a455a-986c-4d77-b23b-ab29919db557\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.307143 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-utilities\") pod \"634a455a-986c-4d77-b23b-ab29919db557\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.307256 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-catalog-content\") pod \"634a455a-986c-4d77-b23b-ab29919db557\" (UID: \"634a455a-986c-4d77-b23b-ab29919db557\") " Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.308046 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-utilities" (OuterVolumeSpecName: "utilities") pod "634a455a-986c-4d77-b23b-ab29919db557" (UID: "634a455a-986c-4d77-b23b-ab29919db557"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.327053 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/634a455a-986c-4d77-b23b-ab29919db557-kube-api-access-nvz5z" (OuterVolumeSpecName: "kube-api-access-nvz5z") pod "634a455a-986c-4d77-b23b-ab29919db557" (UID: "634a455a-986c-4d77-b23b-ab29919db557"). InnerVolumeSpecName "kube-api-access-nvz5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.370021 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "634a455a-986c-4d77-b23b-ab29919db557" (UID: "634a455a-986c-4d77-b23b-ab29919db557"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.408991 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvz5z\" (UniqueName: \"kubernetes.io/projected/634a455a-986c-4d77-b23b-ab29919db557-kube-api-access-nvz5z\") on node \"crc\" DevicePath \"\"" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.409027 4835 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.409036 4835 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/634a455a-986c-4d77-b23b-ab29919db557-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.512160 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4nrh8" event={"ID":"634a455a-986c-4d77-b23b-ab29919db557","Type":"ContainerDied","Data":"5893f8cc981ce54984929be23cf3d8a2c75d1cc58484ed0a8f1d94916019e2d4"} Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.512215 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4nrh8" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.512232 4835 scope.go:117] "RemoveContainer" containerID="3a7c243de7fc6810da876ac914606b5a1f27b47ce1a7f531d4db0bf28c33e87e" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.531319 4835 scope.go:117] "RemoveContainer" containerID="b74c7a34fa8b9f5e6fa840c7227987d81c2e21910ba60a586c89703188330809" Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.546112 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4nrh8"] Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.554111 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4nrh8"] Feb 02 18:18:26 crc kubenswrapper[4835]: I0202 18:18:26.589937 4835 scope.go:117] "RemoveContainer" containerID="77ee008ce23f1a97bac946586937a4d98b63326de226ffee86c99a19fda71f5c" Feb 02 18:18:27 crc kubenswrapper[4835]: I0202 18:18:27.205724 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="634a455a-986c-4d77-b23b-ab29919db557" path="/var/lib/kubelet/pods/634a455a-986c-4d77-b23b-ab29919db557/volumes" Feb 02 18:18:29 crc kubenswrapper[4835]: I0202 18:18:29.199048 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:18:29 crc kubenswrapper[4835]: E0202 18:18:29.199484 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:18:42 crc kubenswrapper[4835]: I0202 18:18:42.188881 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:18:42 crc kubenswrapper[4835]: E0202 18:18:42.190024 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:18:56 crc kubenswrapper[4835]: I0202 18:18:56.189658 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:18:56 crc kubenswrapper[4835]: E0202 18:18:56.190367 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:19:06 crc kubenswrapper[4835]: I0202 18:19:06.883815 4835 generic.go:334] "Generic (PLEG): container finished" podID="13b4415f-4940-4f96-a40b-51262b480e89" containerID="c3898ec793c1d1d3afcfe98e0b8d81f5ca30426cba9714d997d6a6ff5a89938a" exitCode=0 Feb 02 18:19:06 crc kubenswrapper[4835]: I0202 18:19:06.883903 4835 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-55f57/must-gather-62x9s" event={"ID":"13b4415f-4940-4f96-a40b-51262b480e89","Type":"ContainerDied","Data":"c3898ec793c1d1d3afcfe98e0b8d81f5ca30426cba9714d997d6a6ff5a89938a"} Feb 02 18:19:06 crc kubenswrapper[4835]: I0202 18:19:06.885181 4835 scope.go:117] "RemoveContainer" containerID="c3898ec793c1d1d3afcfe98e0b8d81f5ca30426cba9714d997d6a6ff5a89938a" Feb 02 18:19:07 crc kubenswrapper[4835]: I0202 18:19:07.324728 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-55f57_must-gather-62x9s_13b4415f-4940-4f96-a40b-51262b480e89/gather/0.log" Feb 02 18:19:11 crc kubenswrapper[4835]: I0202 18:19:11.189235 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:19:11 crc kubenswrapper[4835]: E0202 18:19:11.190303 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:19:18 crc kubenswrapper[4835]: I0202 18:19:18.808071 4835 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-55f57/must-gather-62x9s"] Feb 02 18:19:18 crc kubenswrapper[4835]: I0202 18:19:18.808765 4835 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-55f57/must-gather-62x9s" podUID="13b4415f-4940-4f96-a40b-51262b480e89" containerName="copy" containerID="cri-o://4d38f1d4e3ed93c7f0455ff9826c8b467dcaae99e4ac9f7074dfc361d7a2351e" gracePeriod=2 Feb 02 18:19:18 crc kubenswrapper[4835]: I0202 18:19:18.817098 4835 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-55f57/must-gather-62x9s"] Feb 02 18:19:18 crc kubenswrapper[4835]: I0202 18:19:18.991395 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-55f57_must-gather-62x9s_13b4415f-4940-4f96-a40b-51262b480e89/copy/0.log" Feb 02 18:19:18 crc kubenswrapper[4835]: I0202 18:19:18.991823 4835 generic.go:334] "Generic (PLEG): container finished" podID="13b4415f-4940-4f96-a40b-51262b480e89" containerID="4d38f1d4e3ed93c7f0455ff9826c8b467dcaae99e4ac9f7074dfc361d7a2351e" exitCode=143 Feb 02 18:19:19 crc kubenswrapper[4835]: I0202 18:19:19.241849 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-55f57_must-gather-62x9s_13b4415f-4940-4f96-a40b-51262b480e89/copy/0.log" Feb 02 18:19:19 crc kubenswrapper[4835]: I0202 18:19:19.242497 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:19:19 crc kubenswrapper[4835]: I0202 18:19:19.287600 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13b4415f-4940-4f96-a40b-51262b480e89-must-gather-output\") pod \"13b4415f-4940-4f96-a40b-51262b480e89\" (UID: \"13b4415f-4940-4f96-a40b-51262b480e89\") " Feb 02 18:19:19 crc kubenswrapper[4835]: I0202 18:19:19.287809 4835 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5xdp\" (UniqueName: \"kubernetes.io/projected/13b4415f-4940-4f96-a40b-51262b480e89-kube-api-access-w5xdp\") pod \"13b4415f-4940-4f96-a40b-51262b480e89\" (UID: \"13b4415f-4940-4f96-a40b-51262b480e89\") " Feb 02 18:19:19 crc kubenswrapper[4835]: I0202 18:19:19.312044 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13b4415f-4940-4f96-a40b-51262b480e89-kube-api-access-w5xdp" (OuterVolumeSpecName: "kube-api-access-w5xdp") pod "13b4415f-4940-4f96-a40b-51262b480e89" (UID: "13b4415f-4940-4f96-a40b-51262b480e89"). InnerVolumeSpecName "kube-api-access-w5xdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 18:19:19 crc kubenswrapper[4835]: I0202 18:19:19.390351 4835 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5xdp\" (UniqueName: \"kubernetes.io/projected/13b4415f-4940-4f96-a40b-51262b480e89-kube-api-access-w5xdp\") on node \"crc\" DevicePath \"\"" Feb 02 18:19:19 crc kubenswrapper[4835]: I0202 18:19:19.468538 4835 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13b4415f-4940-4f96-a40b-51262b480e89-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "13b4415f-4940-4f96-a40b-51262b480e89" (UID: "13b4415f-4940-4f96-a40b-51262b480e89"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 18:19:19 crc kubenswrapper[4835]: I0202 18:19:19.491877 4835 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13b4415f-4940-4f96-a40b-51262b480e89-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 02 18:19:20 crc kubenswrapper[4835]: I0202 18:19:20.022941 4835 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-55f57_must-gather-62x9s_13b4415f-4940-4f96-a40b-51262b480e89/copy/0.log" Feb 02 18:19:20 crc kubenswrapper[4835]: I0202 18:19:20.027810 4835 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-55f57/must-gather-62x9s" Feb 02 18:19:20 crc kubenswrapper[4835]: I0202 18:19:20.027965 4835 scope.go:117] "RemoveContainer" containerID="4d38f1d4e3ed93c7f0455ff9826c8b467dcaae99e4ac9f7074dfc361d7a2351e" Feb 02 18:19:20 crc kubenswrapper[4835]: I0202 18:19:20.046967 4835 scope.go:117] "RemoveContainer" containerID="c3898ec793c1d1d3afcfe98e0b8d81f5ca30426cba9714d997d6a6ff5a89938a" Feb 02 18:19:21 crc kubenswrapper[4835]: I0202 18:19:21.200446 4835 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13b4415f-4940-4f96-a40b-51262b480e89" path="/var/lib/kubelet/pods/13b4415f-4940-4f96-a40b-51262b480e89/volumes" Feb 02 18:19:22 crc kubenswrapper[4835]: I0202 18:19:22.189587 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:19:22 crc kubenswrapper[4835]: E0202 18:19:22.190254 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:19:34 crc kubenswrapper[4835]: I0202 18:19:34.189441 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:19:34 crc kubenswrapper[4835]: E0202 18:19:34.190228 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:19:47 crc kubenswrapper[4835]: I0202 18:19:47.188994 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:19:47 crc kubenswrapper[4835]: E0202 18:19:47.189786 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:19:59 crc kubenswrapper[4835]: I0202 18:19:59.194695 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:19:59 crc kubenswrapper[4835]: E0202 18:19:59.195534 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:20:13 crc kubenswrapper[4835]: I0202 18:20:13.189896 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" 
Feb 02 18:20:13 crc kubenswrapper[4835]: E0202 18:20:13.190797 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:20:15 crc kubenswrapper[4835]: I0202 18:20:15.855972 4835 scope.go:117] "RemoveContainer" containerID="9b262a7058f6bf7957ad93d92efb5345f0de618933963a2faf5de92aab7f73d5" Feb 02 18:20:26 crc kubenswrapper[4835]: I0202 18:20:26.190292 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:20:26 crc kubenswrapper[4835]: E0202 18:20:26.191115 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:20:38 crc kubenswrapper[4835]: I0202 18:20:38.189655 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:20:38 crc kubenswrapper[4835]: E0202 18:20:38.190630 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:20:53 crc kubenswrapper[4835]: I0202 18:20:53.189409 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:20:53 crc kubenswrapper[4835]: E0202 18:20:53.190197 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:21:05 crc kubenswrapper[4835]: I0202 18:21:05.189541 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:21:05 crc kubenswrapper[4835]: E0202 18:21:05.190466 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:21:20 crc kubenswrapper[4835]: I0202 18:21:20.188856 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:21:20 
crc kubenswrapper[4835]: E0202 18:21:20.190473 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:21:33 crc kubenswrapper[4835]: I0202 18:21:33.188600 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:21:33 crc kubenswrapper[4835]: E0202 18:21:33.189422 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:21:46 crc kubenswrapper[4835]: I0202 18:21:46.188611 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:21:46 crc kubenswrapper[4835]: E0202 18:21:46.189506 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:22:00 crc kubenswrapper[4835]: I0202 18:22:00.189290 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:22:00 crc kubenswrapper[4835]: E0202 18:22:00.190193 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:22:12 crc kubenswrapper[4835]: I0202 18:22:12.188441 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:22:12 crc kubenswrapper[4835]: E0202 18:22:12.190171 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:22:25 crc kubenswrapper[4835]: I0202 18:22:25.189015 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:22:25 crc kubenswrapper[4835]: E0202 18:22:25.189836 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:22:37 crc kubenswrapper[4835]: I0202 18:22:37.189440 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:22:37 crc kubenswrapper[4835]: E0202 18:22:37.190355 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:22:51 crc kubenswrapper[4835]: I0202 18:22:51.188730 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:22:51 crc kubenswrapper[4835]: E0202 18:22:51.189535 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:23:06 crc kubenswrapper[4835]: I0202 18:23:06.188494 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:23:06 crc kubenswrapper[4835]: E0202 18:23:06.189428 4835 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-94jlf_openshift-machine-config-operator(d878a5fb-e7f6-4458-8bcc-119bf67ad45a)\"" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" podUID="d878a5fb-e7f6-4458-8bcc-119bf67ad45a" Feb 02 18:23:17 crc kubenswrapper[4835]: I0202 18:23:17.190570 4835 scope.go:117] "RemoveContainer" containerID="a550cbe817ca59288bf5c3b44ed840b3dc9240cf3c2586a7125857982633f2ad" Feb 02 18:23:18 crc kubenswrapper[4835]: I0202 18:23:18.114484 4835 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-94jlf" event={"ID":"d878a5fb-e7f6-4458-8bcc-119bf67ad45a","Type":"ContainerStarted","Data":"c7ee31bec0dee3188c2dc422bd8f558c09e2c63cd9d405ff0776dec6b9f29472"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515140165722024450 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015140165723017366 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015140152371016504 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015140152371015454 5ustar corecore